xref: /OK3568_Linux_fs/kernel/drivers/gpu/drm/rockchip/rockchip_drm_logo.c (revision 4882a59341e53eb6f0b4789bf948001014eff981)
// SPDX-License-Identifier: (GPL-2.0+ OR MIT)
/*
 * Copyright (c) 2021 Rockchip Electronics Co., Ltd.
 * Author: Sandy Huang <hjc@rock-chips.com>
 */
#include <linux/memblock.h>
#include <linux/of_address.h>
#include <linux/of_platform.h>
#include <linux/clk.h>
#include <linux/clk-provider.h>
#include <linux/iommu.h>

#include <drm/drm_atomic_uapi.h>
#include <drm/drm_drv.h>
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_of.h>
#include <drm/drm_probe_helper.h>

#include "rockchip_drm_drv.h"
#include "rockchip_drm_fb.h"
#include "rockchip_drm_logo.h"

static bool is_support_hotplug(uint32_t output_type)
{
	switch (output_type) {
	case DRM_MODE_CONNECTOR_DVII:
	case DRM_MODE_CONNECTOR_DVID:
	case DRM_MODE_CONNECTOR_DVIA:
	case DRM_MODE_CONNECTOR_DisplayPort:
	case DRM_MODE_CONNECTOR_HDMIA:
	case DRM_MODE_CONNECTOR_HDMIB:
	case DRM_MODE_CONNECTOR_TV:
		return true;
	default:
		return false;
	}
}

static struct drm_crtc *
find_crtc_by_node(struct drm_device *drm_dev, struct device_node *node)
{
	struct device_node *np_crtc;
	struct drm_crtc *crtc;

	np_crtc = of_get_parent(node);
	if (!np_crtc || !of_device_is_available(np_crtc))
		return NULL;

	drm_for_each_crtc(crtc, drm_dev) {
		if (crtc->port == np_crtc)
			return crtc;
	}

	return NULL;
}

static struct rockchip_drm_sub_dev *
find_sub_dev_by_node(struct drm_device *drm_dev, struct device_node *node)
{
	struct device_node *np_connector;
	struct rockchip_drm_sub_dev *sub_dev;

	np_connector = of_graph_get_remote_port_parent(node);
	if (!np_connector || !of_device_is_available(np_connector))
		return NULL;

	sub_dev = rockchip_drm_get_sub_dev(np_connector);
	if (!sub_dev)
		return NULL;

	return sub_dev;
}

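/*
 * When the connector sits behind a bridge, the remote port parent of the
 * "connect" endpoint is the encoder node rather than the connector itself.
 * Walk the encoder's output port (port@1) and pick the first enabled remote
 * node that has registered a rockchip_drm_sub_dev.
 */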
static struct rockchip_drm_sub_dev *
find_sub_dev_by_bridge(struct drm_device *drm_dev, struct device_node *node)
{
	struct device_node *np_encoder, *np_connector = NULL;
	struct rockchip_drm_sub_dev *sub_dev = NULL;
	struct device_node *port, *endpoint;

	np_encoder = of_graph_get_remote_port_parent(node);
	if (!np_encoder || !of_device_is_available(np_encoder))
		goto err_put_encoder;

	port = of_graph_get_port_by_id(np_encoder, 1);
	if (!port) {
		dev_err(drm_dev->dev, "can't find port node!\n");
		goto err_put_encoder;
	}

	for_each_child_of_node(port, endpoint) {
		np_connector = of_graph_get_remote_port_parent(endpoint);
		if (!np_connector) {
			dev_err(drm_dev->dev,
				"can't find connector node, please init!\n");
			goto err_put_port;
		}
		if (!of_device_is_available(np_connector)) {
			of_node_put(np_connector);
			np_connector = NULL;
			continue;
		} else {
			break;
		}
	}
	if (!np_connector) {
		dev_err(drm_dev->dev, "can't find an available connector node!\n");
		goto err_put_port;
	}

	sub_dev = rockchip_drm_get_sub_dev(np_connector);
	of_node_put(np_connector);
err_put_port:
	of_node_put(port);
err_put_encoder:
	of_node_put(np_encoder);

	return sub_dev;
}

static void rockchip_drm_release_reserve_vm(struct drm_device *drm, struct drm_mm_node *node)
{
	struct rockchip_drm_private *private = drm->dev_private;

	mutex_lock(&private->mm_lock);
	if (drm_mm_node_allocated(node))
		drm_mm_remove_node(node);
	mutex_unlock(&private->mm_lock);
}

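/*
 * Pin a fixed range in the driver's drm_mm IOVA allocator so that the 1:1
 * IOMMU mapping created for the loader buffers below cannot collide with
 * IOVAs handed out for new GEM buffers while the logo is still on screen.
 */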
static int rockchip_drm_reserve_vm(struct drm_device *drm, struct drm_mm *mm,
				   struct drm_mm_node *node, u64 size, u64 offset)
{
	struct rockchip_drm_private *private = drm->dev_private;
	int ret;

	node->size = size;
	node->start = offset;
	node->color = 0;
	mutex_lock(&private->mm_lock);
	ret = drm_mm_reserve_node(mm, node);
	mutex_unlock(&private->mm_lock);

	return ret;
}

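/*
 * Hand the reserved logo pages back to the buddy allocator once the logo is
 * no longer displayed. This mirrors the core free_reserved_area(), but works
 * on a physical address range instead of virtual addresses.
 */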
static unsigned long
rockchip_drm_free_reserved_area(phys_addr_t start, phys_addr_t end, int poison, const char *s)
{
	unsigned long pages = 0;

	start = ALIGN_DOWN(start, PAGE_SIZE);
	end = PAGE_ALIGN(end);
	for (; start < end; start += PAGE_SIZE) {
		struct page *page = phys_to_page(start);
		void *direct_map_addr;

		if (!pfn_valid(__phys_to_pfn(start)))
			continue;

		/*
		 * 'direct_map_addr' might be different from 'start'
		 * because some architectures' virt_to_page()
		 * work with aliases.  Getting the direct map
		 * address ensures that we get a _writeable_
		 * alias for the memset().
		 */
		direct_map_addr = page_address(page);
		/*
		 * Perform a kasan-unchecked memset() since this memory
		 * has not been initialized.
		 */
		direct_map_addr = kasan_reset_tag(direct_map_addr);
		if ((unsigned int)poison <= 0xFF)
			memset(direct_map_addr, poison, PAGE_SIZE);

		free_reserved_page(page);
		pages++;
	}

	if (pages && s)
		pr_info("Freeing %s memory: %ldK\n", s, pages << (PAGE_SHIFT - 10));

	return pages;
}

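/*
 * Drop the last reference to the loader logo: tear down the 1:1 IOMMU
 * mapping, release the reserved IOVA node and return the reserved memory
 * region to the system.
 */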
void rockchip_free_loader_memory(struct drm_device *drm)
{
	struct rockchip_drm_private *private = drm->dev_private;
	struct rockchip_logo *logo;

	if (!private || !private->logo || --private->logo->count)
		return;

	logo = private->logo;

	if (private->domain) {
		u32 pg_size = 1UL << __ffs(private->domain->pgsize_bitmap);

		iommu_unmap(private->domain, logo->dma_addr, ALIGN(logo->size, pg_size));
		rockchip_drm_release_reserve_vm(drm, &logo->logo_reserved_node);
	}

	memblock_free(logo->start, logo->size);
	rockchip_drm_free_reserved_area(logo->start, logo->start + logo->size,
					-1, "drm_logo");
	kfree(logo);
	private->logo = NULL;
	private->loader_protect = false;
}

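/*
 * Take over the "drm-logo" (and optional "drm-cubic-lut") reserved-memory
 * regions handed over by the bootloader: reserve their IOVA ranges and
 * create 1:1 IOMMU mappings so the display controller can keep scanning the
 * logo out of the original physical pages until the kernel takes over.
 */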
static int init_loader_memory(struct drm_device *drm_dev)
{
	struct rockchip_drm_private *private = drm_dev->dev_private;
	struct rockchip_logo *logo;
	struct device_node *np = drm_dev->dev->of_node;
	struct device_node *node;
	phys_addr_t start, size;
	u32 pg_size = PAGE_SIZE;
	struct resource res;
	int ret, idx;

	idx = of_property_match_string(np, "memory-region-names", "drm-logo");
	if (idx >= 0)
		node = of_parse_phandle(np, "memory-region", idx);
	else
		node = of_parse_phandle(np, "logo-memory-region", 0);
	if (!node)
		return -ENOMEM;

	ret = of_address_to_resource(node, 0, &res);
	if (ret)
		return ret;
	if (private->domain)
		pg_size = 1UL << __ffs(private->domain->pgsize_bitmap);
	start = ALIGN_DOWN(res.start, pg_size);
	size = resource_size(&res);
	if (!size)
		return -ENOMEM;
	if (!IS_ALIGNED(res.start, PAGE_SIZE) || !IS_ALIGNED(size, PAGE_SIZE))
		DRM_ERROR("Reserved logo memory should be aligned to 0x%lx, current is: start[%pad] size[%pad]\n",
			  PAGE_SIZE, &res.start, &size);
	if (pg_size != PAGE_SIZE)
		DRM_WARN("iommu page size[0x%x] isn't equal to OS page size[0x%lx]\n", pg_size, PAGE_SIZE);

	logo = kmalloc(sizeof(*logo), GFP_KERNEL);
	if (!logo)
		return -ENOMEM;

	logo->kvaddr = phys_to_virt(start);

	if (private->domain) {
		ret = rockchip_drm_reserve_vm(drm_dev, &private->mm, &logo->logo_reserved_node, size, start);
		if (ret)
			dev_err(drm_dev->dev, "failed to reserve vm for logo memory\n");
		ret = iommu_map(private->domain, start, start, ALIGN(size, pg_size),
				IOMMU_WRITE | IOMMU_READ);
		if (ret) {
			dev_err(drm_dev->dev, "failed to create 1:1 mapping\n");
			goto err_free_logo;
		}
	}

	logo->dma_addr = start;
	logo->start = res.start;
	logo->size = size;
	logo->count = 1;
	private->logo = logo;

	idx = of_property_match_string(np, "memory-region-names", "drm-cubic-lut");
	if (idx < 0)
		return 0;

	node = of_parse_phandle(np, "memory-region", idx);
	if (!node)
		return -ENOMEM;

	ret = of_address_to_resource(node, 0, &res);
	if (ret)
		return ret;
	start = ALIGN_DOWN(res.start, pg_size);
	size = resource_size(&res);
	if (!size)
		return 0;
	if (!IS_ALIGNED(res.start, PAGE_SIZE) || !IS_ALIGNED(size, PAGE_SIZE))
		DRM_ERROR("Reserved drm cubic memory should be aligned to 0x%lx, current is: start[%pad] size[%pad]\n",
			  PAGE_SIZE, &res.start, &size);

	private->cubic_lut_kvaddr = phys_to_virt(start);
	if (private->domain) {
		private->clut_reserved_node = kmalloc(sizeof(struct drm_mm_node), GFP_KERNEL);
		if (!private->clut_reserved_node)
			return -ENOMEM;

		ret = rockchip_drm_reserve_vm(drm_dev, &private->mm, private->clut_reserved_node, size, start);
		if (ret)
			dev_err(drm_dev->dev, "failed to reserve vm for clut memory\n");

		ret = iommu_map(private->domain, start, start, ALIGN(size, pg_size),
				IOMMU_WRITE | IOMMU_READ);
		if (ret) {
			dev_err(drm_dev->dev, "failed to create 1:1 mapping for cubic lut\n");
			goto err_free_clut;
		}
	}
	private->cubic_lut_dma_addr = start;

	return 0;

err_free_clut:
	rockchip_drm_release_reserve_vm(drm_dev, private->clut_reserved_node);
	kfree(private->clut_reserved_node);
	private->clut_reserved_node = NULL;
err_free_logo:
	rockchip_drm_release_reserve_vm(drm_dev, &logo->logo_reserved_node);
	kfree(logo);

	return ret;
}

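/*
 * Wrap the logo image left in reserved memory in a drm_framebuffer, using
 * the geometry (offset/width/height/bpp) that the bootloader recorded in
 * the route node.
 */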
static struct drm_framebuffer *
get_framebuffer_by_node(struct drm_device *drm_dev, struct device_node *node)
{
	struct rockchip_drm_private *private = drm_dev->dev_private;
	struct drm_mode_fb_cmd2 mode_cmd = { 0 };
	u32 val;
	int bpp;

	if (WARN_ON(!private->logo))
		return NULL;

	if (of_property_read_u32(node, "logo,offset", &val)) {
		dev_err(drm_dev->dev, "%s: failed to get logo,offset\n", node->full_name);
		return NULL;
	}
	mode_cmd.offsets[0] = val;

	if (of_property_read_u32(node, "logo,width", &val)) {
		dev_err(drm_dev->dev, "%s: failed to get logo,width\n", node->full_name);
		return NULL;
	}
	mode_cmd.width = val;

	if (of_property_read_u32(node, "logo,height", &val)) {
		dev_err(drm_dev->dev, "%s: failed to get logo,height\n", node->full_name);
		return NULL;
	}
	mode_cmd.height = val;

	if (of_property_read_u32(node, "logo,bpp", &val)) {
		dev_err(drm_dev->dev, "%s: failed to get logo,bpp\n", node->full_name);
		return NULL;
	}
	bpp = val;

	mode_cmd.pitches[0] = ALIGN(mode_cmd.width * bpp, 32) / 8;

	switch (bpp) {
	case 16:
		mode_cmd.pixel_format = DRM_FORMAT_RGB565;
		break;
	case 24:
		mode_cmd.pixel_format = DRM_FORMAT_RGB888;
		break;
	case 32:
		mode_cmd.pixel_format = DRM_FORMAT_XRGB8888;
		break;
	default:
		dev_err(drm_dev->dev, "%s: unsupported logo bpp %d\n", node->full_name, bpp);
		return NULL;
	}

	return rockchip_drm_logo_fb_alloc(drm_dev, &mode_cmd, private->logo);
}

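/*
 * Parse the optional post-CSC tuning properties of a route node. When
 * "post-csc,enable" is set, each missing value falls back to 256, the
 * driver's default for that parameter.
 */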
static void of_parse_post_csc_info(struct device_node *route, struct rockchip_drm_mode_set *set)
{
	int val;

	if (!of_property_read_u32(route, "post-csc,enable", &val))
		set->csc.csc_enable = val;
	else
		set->csc.csc_enable = 0;

	if (!set->csc.csc_enable)
		return;

	if (!of_property_read_u32(route, "post-csc,hue", &val))
		set->csc.hue = val;
	else
		set->csc.hue = 256;

	if (!of_property_read_u32(route, "post-csc,saturation", &val))
		set->csc.saturation = val;
	else
		set->csc.saturation = 256;

	if (!of_property_read_u32(route, "post-csc,contrast", &val))
		set->csc.contrast = val;
	else
		set->csc.contrast = 256;

	if (!of_property_read_u32(route, "post-csc,brightness", &val))
		set->csc.brightness = val;
	else
		set->csc.brightness = 256;

	if (!of_property_read_u32(route, "post-csc,r-gain", &val))
		set->csc.r_gain = val;
	else
		set->csc.r_gain = 256;

	if (!of_property_read_u32(route, "post-csc,g-gain", &val))
		set->csc.g_gain = val;
	else
		set->csc.g_gain = 256;

	if (!of_property_read_u32(route, "post-csc,b-gain", &val))
		set->csc.b_gain = val;
	else
		set->csc.b_gain = 256;

	if (!of_property_read_u32(route, "post-csc,r-offset", &val))
		set->csc.r_offset = val;
	else
		set->csc.r_offset = 256;

	if (!of_property_read_u32(route, "post-csc,g-offset", &val))
		set->csc.g_offset = val;
	else
		set->csc.g_offset = 256;

	if (!of_property_read_u32(route, "post-csc,b-offset", &val))
		set->csc.b_offset = val;
	else
		set->csc.b_offset = 256;
}

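/*
 * Build a rockchip_drm_mode_set from one child of the "route" node filled in
 * by the bootloader. A sketch of such an entry (property names taken from
 * the parsing below, node name and values are placeholders) looks roughly
 * like:
 *
 *	route-hdmi {
 *		status = "okay";
 *		connect = <&vop_out_hdmi>;
 *		logo,mode = "center";
 *		video,hdisplay = <1920>;
 *		video,vdisplay = <1080>;
 *		video,vrefresh = <60>;
 *	};
 *
 * "connect" points at the CRTC output endpoint. The logo geometry
 * (logo,offset/width/height/bpp) is required by get_framebuffer_by_node();
 * the video/overscan/bcsh/post-csc properties are optional.
 */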
static struct rockchip_drm_mode_set *
of_parse_display_resource(struct drm_device *drm_dev, struct device_node *route)
{
	struct rockchip_drm_private *private = drm_dev->dev_private;
	struct rockchip_drm_mode_set *set;
	struct device_node *connect;
	struct drm_framebuffer *fb;
	struct rockchip_drm_sub_dev *sub_dev;
	struct drm_crtc *crtc;
	const char *string;
	u32 val;

	connect = of_parse_phandle(route, "connect", 0);
	if (!connect)
		return NULL;

	fb = get_framebuffer_by_node(drm_dev, route);
	if (IS_ERR_OR_NULL(fb))
		return NULL;

	crtc = find_crtc_by_node(drm_dev, connect);

	sub_dev = find_sub_dev_by_node(drm_dev, connect);

	if (!sub_dev)
		sub_dev = find_sub_dev_by_bridge(drm_dev, connect);

	if (!crtc || !sub_dev) {
		dev_warn(drm_dev->dev,
			 "No available crtc or connector for display\n");
		drm_framebuffer_put(fb);
		return NULL;
	}

	set = kzalloc(sizeof(*set), GFP_KERNEL);
	if (!set)
		return NULL;

	if (!of_property_read_u32(route, "video,clock", &val))
		set->clock = val;

	if (!of_property_read_u32(route, "video,hdisplay", &val))
		set->hdisplay = val;

	if (!of_property_read_u32(route, "video,vdisplay", &val))
		set->vdisplay = val;

	if (!of_property_read_u32(route, "video,crtc_hsync_end", &val))
		set->crtc_hsync_end = val;

	if (!of_property_read_u32(route, "video,crtc_vsync_end", &val))
		set->crtc_vsync_end = val;

	if (!of_property_read_u32(route, "video,vrefresh", &val))
		set->vrefresh = val;

	if (!of_property_read_u32(route, "video,flags", &val))
		set->flags = val;

	if (!of_property_read_u32(route, "video,aspect_ratio", &val))
		set->picture_aspect_ratio = val;

	if (!of_property_read_u32(route, "overscan,left_margin", &val))
		set->left_margin = val;

	if (!of_property_read_u32(route, "overscan,right_margin", &val))
		set->right_margin = val;

	if (!of_property_read_u32(route, "overscan,top_margin", &val))
		set->top_margin = val;

	if (!of_property_read_u32(route, "overscan,bottom_margin", &val))
		set->bottom_margin = val;

	if (!of_property_read_u32(route, "bcsh,brightness", &val))
		set->brightness = val;
	else
		set->brightness = 50;

	if (!of_property_read_u32(route, "bcsh,contrast", &val))
		set->contrast = val;
	else
		set->contrast = 50;

	if (!of_property_read_u32(route, "bcsh,saturation", &val))
		set->saturation = val;
	else
		set->saturation = 50;

	if (!of_property_read_u32(route, "bcsh,hue", &val))
		set->hue = val;
	else
		set->hue = 50;

	of_parse_post_csc_info(route, set);

	set->force_output = of_property_read_bool(route, "force-output");

	if (!of_property_read_u32(route, "cubic_lut,offset", &val)) {
		private->cubic_lut[crtc->index].enable = true;
		private->cubic_lut[crtc->index].offset = val;
	}

	set->ratio = 1;
	if (!of_property_read_string(route, "logo,mode", &string) &&
	    !strcmp(string, "fullscreen"))
		set->ratio = 0;

	set->fb = fb;
	set->crtc = crtc;
	set->sub_dev = sub_dev;

	return set;
}

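/*
 * Probe the connector and build its mode list, much like
 * drm_helper_probe_single_connector_modes(), but with a force_output knob:
 * when set, the connector is forced to "connected" and a fallback mode list
 * is added even if nothing is attached, so the logo can still be lit up.
 */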
static int rockchip_drm_fill_connector_modes(struct drm_connector *connector,
					     uint32_t maxX, uint32_t maxY,
					     bool force_output)
{
	struct drm_device *dev = connector->dev;
	struct drm_display_mode *mode;
	const struct drm_connector_helper_funcs *connector_funcs =
		connector->helper_private;
	int count = 0;
	bool verbose_prune = true;
	enum drm_connector_status old_status;

	WARN_ON(!mutex_is_locked(&dev->mode_config.mutex));

	DRM_DEBUG_KMS("[CONNECTOR:%d:%s]\n", connector->base.id,
		      connector->name);
	/* set all modes to the unverified state */
	list_for_each_entry(mode, &connector->modes, head)
		mode->status = MODE_STALE;

	if (force_output)
		connector->force = DRM_FORCE_ON;
	if (connector->force) {
		if (connector->force == DRM_FORCE_ON ||
		    connector->force == DRM_FORCE_ON_DIGITAL)
			connector->status = connector_status_connected;
		else
			connector->status = connector_status_disconnected;
		if (connector->funcs->force)
			connector->funcs->force(connector);
	} else {
		old_status = connector->status;

		if (connector->funcs->detect)
			connector->status = connector->funcs->detect(connector, true);
		else
			connector->status = connector_status_connected;
		/*
		 * Normally either the driver's hpd code or the poll loop should
		 * pick up any changes and fire the hotplug event. But if
		 * userspace sneaks in a probe, we might miss a change. Hence
		 * check here, and if anything changed start the hotplug code.
		 */
		if (old_status != connector->status) {
			DRM_DEBUG_KMS("[CONNECTOR:%d:%s] status updated from %d to %d\n",
				      connector->base.id,
				      connector->name,
				      old_status, connector->status);

			/*
			 * The hotplug event code might call into the fb
			 * helpers, and so expects that we do not hold any
			 * locks. Fire up the poll struct instead, it will
			 * disable itself again.
			 */
			dev->mode_config.delayed_event = true;
			if (dev->mode_config.poll_enabled)
				schedule_delayed_work(&dev->mode_config.output_poll_work,
						      0);
		}
	}

	/* Re-enable polling in case the global poll config changed. */
	if (!dev->mode_config.poll_running)
		drm_kms_helper_poll_enable(dev);

	dev->mode_config.poll_running = true;

	if (connector->status == connector_status_disconnected) {
		DRM_DEBUG_KMS("[CONNECTOR:%d:%s] disconnected\n",
			      connector->base.id, connector->name);
		drm_connector_update_edid_property(connector, NULL);
		verbose_prune = false;
		goto prune;
	}

	if (!force_output)
		count = (*connector_funcs->get_modes)(connector);

	if (count == 0 && connector->status == connector_status_connected)
		count = drm_add_modes_noedid(connector, 4096, 4096);
	if (force_output)
		count += rockchip_drm_add_modes_noedid(connector);
	if (count == 0)
		goto prune;

	drm_connector_list_update(connector);

	list_for_each_entry(mode, &connector->modes, head) {
		if (mode->status == MODE_OK)
			mode->status = drm_mode_validate_driver(dev, mode);

		if (mode->status == MODE_OK)
			mode->status = drm_mode_validate_size(mode, maxX, maxY);

		/**
		 * if (mode->status == MODE_OK)
		 *	mode->status = drm_mode_validate_flag(mode, mode_flags);
		 */
		if (mode->status == MODE_OK && connector_funcs->mode_valid)
			mode->status = connector_funcs->mode_valid(connector,
								   mode);
		if (mode->status == MODE_OK)
			mode->status = drm_mode_validate_ycbcr420(mode,
								  connector);
	}

prune:
	drm_mode_prune_invalid(dev, &connector->modes, verbose_prune);

	if (list_empty(&connector->modes))
		return 0;

	drm_mode_sort(&connector->modes);

	DRM_DEBUG_KMS("[CONNECTOR:%d:%s] probed modes :\n", connector->base.id,
		      connector->name);
	list_for_each_entry(mode, &connector->modes, head) {
		drm_mode_set_crtcinfo(mode, CRTC_INTERLACE_HALVE_V);
		drm_mode_debug_printmodeline(mode);
	}

	return count;
}

/*
 * For connectors that support multiple encoders, either the
 * .atomic_best_encoder() or .best_encoder() operation must be implemented.
 */
static struct drm_encoder *
rockchip_drm_connector_get_single_encoder(struct drm_connector *connector)
{
	struct drm_encoder *encoder;

	WARN_ON(hweight32(connector->possible_encoders) > 1);
	drm_connector_for_each_possible_encoder(connector, encoder)
		return encoder;

	return NULL;
}

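/*
 * Validate one parsed route against the connector's probed mode list and
 * prepare the initial atomic state for it: pick the matching mode, apply
 * loader-protect on the encoder/CRTC and place the logo framebuffer on the
 * primary plane (centered at native size, or stretched to fullscreen when
 * set->ratio is 0).
 */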
static int setup_initial_state(struct drm_device *drm_dev,
			       struct drm_atomic_state *state,
			       struct rockchip_drm_mode_set *set)
{
	struct rockchip_drm_private *priv = drm_dev->dev_private;
	struct drm_connector *connector = set->sub_dev->connector;
	struct drm_crtc *crtc = set->crtc;
	struct drm_crtc_state *crtc_state;
	struct drm_connector_state *conn_state;
	struct drm_plane_state *primary_state;
	struct drm_display_mode *mode = NULL;
	const struct drm_connector_helper_funcs *funcs;
	int pipe = drm_crtc_index(crtc);
	bool is_crtc_enabled = true;
	int hdisplay, vdisplay;
	int fb_width, fb_height;
	int found = 0, match = 0;
	int num_modes;
	int ret = 0;
	struct rockchip_crtc_state *s = NULL;

	if (!set->hdisplay || !set->vdisplay || !set->vrefresh)
		is_crtc_enabled = false;

	crtc->state->state = state;

	conn_state = drm_atomic_get_connector_state(state, connector);
	if (IS_ERR(conn_state))
		return PTR_ERR(conn_state);

	funcs = connector->helper_private;

	if (funcs->best_encoder)
		conn_state->best_encoder = funcs->best_encoder(connector);
	else
		conn_state->best_encoder = rockchip_drm_connector_get_single_encoder(connector);

	if (set->sub_dev->loader_protect) {
		ret = set->sub_dev->loader_protect(conn_state->best_encoder, true);
		if (ret) {
			dev_err(drm_dev->dev,
				"connector[%s] loader protect failed\n",
				connector->name);
			return ret;
		}
	}

	num_modes = rockchip_drm_fill_connector_modes(connector, 7680, 7680, set->force_output);
	if (!num_modes) {
		dev_err(drm_dev->dev, "connector[%s] can't find any modes\n",
			connector->name);
		ret = -EINVAL;
		goto error_conn;
	}

	list_for_each_entry(mode, &connector->modes, head) {
		if (mode->clock == set->clock &&
		    mode->hdisplay == set->hdisplay &&
		    mode->vdisplay == set->vdisplay &&
		    mode->crtc_hsync_end == set->crtc_hsync_end &&
		    mode->crtc_vsync_end == set->crtc_vsync_end &&
		    drm_mode_vrefresh(mode) == set->vrefresh &&
		    /* we only care about the bits covered by DRM_MODE_FLAG_ALL,
		     * so compare mode->flags with set->flags & DRM_MODE_FLAG_ALL.
		     */
		    mode->flags == (set->flags & DRM_MODE_FLAG_ALL) &&
		    mode->picture_aspect_ratio == set->picture_aspect_ratio) {
			found = 1;
			match = 1;
			break;
		}
	}

	if (!found) {
		ret = -EINVAL;
		connector->status = connector_status_disconnected;
		dev_err(drm_dev->dev, "connector[%s] can't find any matching mode\n",
			connector->name);
		DRM_INFO("%s supported modes:\n\n", connector->name);
		list_for_each_entry(mode, &connector->modes, head) {
			DRM_INFO(DRM_MODE_FMT "\n", DRM_MODE_ARG(mode));
		}
		DRM_INFO("uboot set mode: h/v display[%d,%d] h/v sync_end[%d,%d] vrefresh[%d], flags[0x%x], aspect_ratio[%d]\n",
			 set->hdisplay, set->vdisplay, set->crtc_hsync_end, set->crtc_vsync_end,
			 set->vrefresh, set->flags, set->picture_aspect_ratio);
		goto error_conn;
	}

	conn_state->tv.brightness = set->brightness;
	conn_state->tv.contrast = set->contrast;
	conn_state->tv.saturation = set->saturation;
	conn_state->tv.hue = set->hue;
	set->mode = mode;
	crtc_state = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(crtc_state)) {
		ret = PTR_ERR(crtc_state);
		goto error_conn;
	}

	drm_mode_copy(&crtc_state->adjusted_mode, mode);
	if (!match || !is_crtc_enabled) {
		set->mode_changed = true;
	} else {
		ret = drm_atomic_set_crtc_for_connector(conn_state, crtc);
		if (ret)
			goto error_conn;

		mode->picture_aspect_ratio = HDMI_PICTURE_ASPECT_NONE;
		ret = drm_atomic_set_mode_for_crtc(crtc_state, mode);
		if (ret)
			goto error_conn;

		crtc_state->active = true;

		if (priv->crtc_funcs[pipe] &&
		    priv->crtc_funcs[pipe]->loader_protect)
			priv->crtc_funcs[pipe]->loader_protect(crtc, true, &set->csc);
	}

	if (!set->fb) {
		ret = 0;
		goto error_crtc;
	}
	primary_state = drm_atomic_get_plane_state(state, crtc->primary);
	if (IS_ERR(primary_state)) {
		ret = PTR_ERR(primary_state);
		goto error_crtc;
	}

	hdisplay = mode->hdisplay;
	vdisplay = mode->vdisplay;
	fb_width = set->fb->width;
	fb_height = set->fb->height;

	primary_state->crtc = crtc;
	primary_state->src_x = 0;
	primary_state->src_y = 0;
	primary_state->src_w = fb_width << 16;
	primary_state->src_h = fb_height << 16;
	if (set->ratio) {
		if (set->fb->width >= hdisplay) {
			primary_state->crtc_x = 0;
			primary_state->crtc_w = hdisplay;
		} else {
			primary_state->crtc_x = (hdisplay - fb_width) / 2;
			primary_state->crtc_w = set->fb->width;
		}

		if (set->fb->height >= vdisplay) {
			primary_state->crtc_y = 0;
			primary_state->crtc_h = vdisplay;
		} else {
			primary_state->crtc_y = (vdisplay - fb_height) / 2;
			primary_state->crtc_h = fb_height;
		}
	} else {
		primary_state->crtc_x = 0;
		primary_state->crtc_y = 0;
		primary_state->crtc_w = hdisplay;
		primary_state->crtc_h = vdisplay;
	}
	s = to_rockchip_crtc_state(crtc->state);
	s->output_type = connector->connector_type;

	return 0;

error_crtc:
	if (priv->crtc_funcs[pipe] && priv->crtc_funcs[pipe]->loader_protect)
		priv->crtc_funcs[pipe]->loader_protect(crtc, false, NULL);
error_conn:
	if (set->sub_dev->loader_protect)
		set->sub_dev->loader_protect(conn_state->best_encoder, false);

	return ret;
}

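/*
 * Re-apply one mode_set on top of the duplicated atomic state: either set
 * the full mode (if it changed) or just re-run the encoder/bridge mode_set
 * hooks, then attach the logo framebuffer to the CRTC's primary plane.
 */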
static int update_state(struct drm_device *drm_dev,
			struct drm_atomic_state *state,
			struct rockchip_drm_mode_set *set,
			unsigned int *plane_mask)
{
	struct drm_crtc *crtc = set->crtc;
	struct drm_connector *connector = set->sub_dev->connector;
	struct drm_display_mode *mode = set->mode;
	struct drm_plane_state *primary_state;
	struct drm_crtc_state *crtc_state;
	struct drm_connector_state *conn_state;
	int ret;
	struct rockchip_crtc_state *s;

	crtc_state = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);
	conn_state = drm_atomic_get_connector_state(state, connector);
	if (IS_ERR(conn_state))
		return PTR_ERR(conn_state);
	s = to_rockchip_crtc_state(crtc_state);
	s->left_margin = set->left_margin;
	s->right_margin = set->right_margin;
	s->top_margin = set->top_margin;
	s->bottom_margin = set->bottom_margin;

	if (set->mode_changed) {
		ret = drm_atomic_set_crtc_for_connector(conn_state, crtc);
		if (ret)
			return ret;

		ret = drm_atomic_set_mode_for_crtc(crtc_state, mode);
		if (ret)
			return ret;

		crtc_state->active = true;
	} else {
		const struct drm_encoder_helper_funcs *encoder_helper_funcs;
		const struct drm_connector_helper_funcs *connector_helper_funcs;
		struct drm_encoder *encoder;
		struct drm_bridge *bridge;

		connector_helper_funcs = connector->helper_private;
		if (!connector_helper_funcs)
			return -ENXIO;
		if (connector_helper_funcs->best_encoder)
			encoder = connector_helper_funcs->best_encoder(connector);
		else
			encoder = rockchip_drm_connector_get_single_encoder(connector);
		if (!encoder)
			return -ENXIO;
		encoder_helper_funcs = encoder->helper_private;
		if (!encoder_helper_funcs->atomic_check)
			return -ENXIO;
		ret = encoder_helper_funcs->atomic_check(encoder, crtc->state,
							 conn_state);
		if (ret)
			return ret;

		if (encoder_helper_funcs->atomic_mode_set)
			encoder_helper_funcs->atomic_mode_set(encoder,
							      crtc_state,
							      conn_state);
		else if (encoder_helper_funcs->mode_set)
			encoder_helper_funcs->mode_set(encoder, mode, mode);

		bridge = drm_bridge_chain_get_first_bridge(encoder);
		drm_bridge_chain_mode_set(bridge, mode, mode);
	}

	primary_state = drm_atomic_get_plane_state(state, crtc->primary);
	if (IS_ERR(primary_state))
		return PTR_ERR(primary_state);

	crtc_state->plane_mask = 1 << drm_plane_index(crtc->primary);
	*plane_mask |= crtc_state->plane_mask;

	drm_atomic_set_fb_for_plane(primary_state, set->fb);
	drm_framebuffer_put(set->fb);
	ret = drm_atomic_set_crtc_for_plane(primary_state, crtc);

	return ret;
}

static void rockchip_drm_copy_mode_from_mode_set(struct drm_display_mode *mode,
						 struct rockchip_drm_mode_set *set)
{
	mode->clock = set->clock;
	mode->hdisplay = set->hdisplay;
	mode->vdisplay = set->vdisplay;
	mode->crtc_hsync_end = set->crtc_hsync_end;
	mode->crtc_vsync_end = set->crtc_vsync_end;
	mode->flags = set->flags & DRM_MODE_FLAG_ALL;
	mode->picture_aspect_ratio = set->picture_aspect_ratio;
}

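/*
 * Take over the display configuration that the bootloader left behind:
 * parse every enabled "route" node, build an atomic state that mirrors the
 * loader's setup, commit it, and then release the loader memory. CRTCs that
 * end up without a usable route are closed explicitly.
 */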
void rockchip_drm_show_logo(struct drm_device *drm_dev)
{
	struct drm_atomic_state *state, *old_state;
	struct device_node *np = drm_dev->dev->of_node;
	struct drm_mode_config *mode_config = &drm_dev->mode_config;
	struct rockchip_drm_private *private = drm_dev->dev_private;
	struct device_node *root, *route;
	struct rockchip_drm_mode_set *set, *tmp, *unset;
	struct list_head mode_set_list;
	struct list_head mode_unset_list;
	unsigned int plane_mask = 0;
	struct drm_crtc *crtc;
	int ret, i;

	root = of_get_child_by_name(np, "route");
	if (!root) {
		dev_warn(drm_dev->dev, "failed to parse resources for logo display\n");
		return;
	}

	if (init_loader_memory(drm_dev)) {
		dev_warn(drm_dev->dev, "failed to parse loader memory\n");
		return;
	}

	INIT_LIST_HEAD(&mode_set_list);
	INIT_LIST_HEAD(&mode_unset_list);
	drm_modeset_lock_all(drm_dev);
	state = drm_atomic_state_alloc(drm_dev);
	if (!state) {
		dev_err(drm_dev->dev, "failed to alloc atomic state for logo display\n");
		ret = -ENOMEM;
		goto err_unlock;
	}

	state->acquire_ctx = mode_config->acquire_ctx;

	for_each_child_of_node(root, route) {
		if (!of_device_is_available(route))
			continue;

		set = of_parse_display_resource(drm_dev, route);
		if (!set)
			continue;

		if (setup_initial_state(drm_dev, state, set)) {
			drm_framebuffer_put(set->fb);
			INIT_LIST_HEAD(&set->head);
			list_add_tail(&set->head, &mode_unset_list);
			continue;
		}

		INIT_LIST_HEAD(&set->head);
		list_add_tail(&set->head, &mode_set_list);
	}

	/*
	 * mode_unset_list stores the routes that could not be set up. If such
	 * a route's CRTC isn't used by any successfully configured route, it
	 * should be closed.
	 */
	list_for_each_entry_safe(unset, tmp, &mode_unset_list, head) {
		struct rockchip_drm_mode_set *tmp_set;
		int find_used_crtc = 0;

		list_for_each_entry_safe(set, tmp_set, &mode_set_list, head) {
			if (set->crtc == unset->crtc) {
				find_used_crtc = 1;
				continue;
			}
		}

		if (!find_used_crtc) {
			struct drm_crtc *crtc = unset->crtc;
			struct drm_crtc_state *crtc_state;
			int pipe = drm_crtc_index(crtc);
			struct rockchip_drm_private *priv =
							drm_dev->dev_private;

			/*
			 * The display timing information of this mode_set was
			 * parsed from the dts, which is filled in by U-Boot. If
			 * the mode_set ends up on mode_unset_list, it should be
			 * copied into crtc_state->adjusted_mode so that the
			 * splice_mode flag can be checked in loader_protect().
			 */
			if (unset->hdisplay && unset->vdisplay) {
				crtc_state = drm_atomic_get_crtc_state(state, crtc);
				if (crtc_state)
					rockchip_drm_copy_mode_from_mode_set(&crtc_state->adjusted_mode,
									     unset);
				if (priv->crtc_funcs[pipe] &&
				    priv->crtc_funcs[pipe]->loader_protect)
					priv->crtc_funcs[pipe]->loader_protect(crtc, true,
									       &set->csc);
				priv->crtc_funcs[pipe]->crtc_close(crtc);
				if (priv->crtc_funcs[pipe] &&
				    priv->crtc_funcs[pipe]->loader_protect)
					priv->crtc_funcs[pipe]->loader_protect(crtc, false, NULL);
			}
		}

		list_del(&unset->head);
		kfree(unset);
	}

	if (list_empty(&mode_set_list)) {
		dev_warn(drm_dev->dev, "can't find any logo display\n");
		ret = -ENXIO;
		goto err_free_state;
	}

	/*
	 * This state holds the initial device status. Swap it into the DRM
	 * device as the old state, so that when a new state comes in it can
	 * be compared against it to decide what needs updating.
	 */
	WARN_ON(drm_atomic_helper_swap_state(state, false));
	drm_atomic_state_put(state);
	old_state = drm_atomic_helper_duplicate_state(drm_dev,
						      mode_config->acquire_ctx);
	if (IS_ERR(old_state)) {
		dev_err(drm_dev->dev, "failed to duplicate atomic state for logo display\n");
		ret = PTR_ERR_OR_ZERO(old_state);
		goto err_free_state;
	}

	state = drm_atomic_helper_duplicate_state(drm_dev,
						  mode_config->acquire_ctx);
	if (IS_ERR(state)) {
		dev_err(drm_dev->dev, "failed to duplicate atomic state for logo display\n");
		ret = PTR_ERR_OR_ZERO(state);
		goto err_free_old_state;
	}
	state->acquire_ctx = mode_config->acquire_ctx;

	list_for_each_entry(set, &mode_set_list, head)
		/*
		 * We don't expect update_state() to fail here.
		 */
		WARN_ON(update_state(drm_dev, state, set, &plane_mask));

	for (i = 0; i < state->num_connector; i++) {
		if (state->connectors[i].new_state->connector->status !=
		    connector_status_connected)
			state->connectors[i].new_state->best_encoder = NULL;
	}

	ret = drm_atomic_commit(state);
	/**
	 * todo
	 * drm_atomic_clean_old_fb(drm_dev, plane_mask, ret);
	 */

	list_for_each_entry_safe(set, tmp, &mode_set_list, head) {
		if (set->force_output)
			set->sub_dev->connector->force = DRM_FORCE_UNSPECIFIED;
		list_del(&set->head);
		kfree(set);
	}

	/*
	 * Is it possible to hit a deadlock here?
	 */
	WARN_ON(ret == -EDEADLK);

	if (ret) {
		/*
		 * Restore the display status if the atomic commit failed.
		 */
		WARN_ON(drm_atomic_helper_swap_state(old_state, false));
		goto err_free_state;
	}

	rockchip_free_loader_memory(drm_dev);
	drm_atomic_state_put(old_state);
	drm_atomic_state_put(state);

	private->loader_protect = true;
	drm_modeset_unlock_all(drm_dev);

	if (private->fbdev_helper && private->fbdev_helper->fb) {
		drm_for_each_crtc(crtc, drm_dev) {
			struct rockchip_crtc_state *s = NULL;

			s = to_rockchip_crtc_state(crtc->state);
			if (is_support_hotplug(s->output_type))
				drm_framebuffer_get(private->fbdev_helper->fb);
		}
	}

	return;
err_free_old_state:
	drm_atomic_state_put(old_state);
err_free_state:
	drm_atomic_state_put(state);
err_unlock:
	drm_modeset_unlock_all(drm_dev);
	if (ret)
		dev_err(drm_dev->dev, "failed to show kernel logo\n");
}

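/*
 * Built-in only: grab the display-related clocks that the bootloader enabled
 * at arch_initcall time and keep them running until late_initcall, so the
 * logo keeps scanning out while unused clocks are being disabled during boot.
 */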
#ifndef MODULE
static const char *const loader_protect_clocks[] __initconst = {
	"hclk_vio",
	"hclk_vop",
	"hclk_vopb",
	"hclk_vopl",
	"aclk_vio",
	"aclk_vio0",
	"aclk_vio1",
	"aclk_vop",
	"aclk_vopb",
	"aclk_vopl",
	"aclk_vo_pre",
	"aclk_vio_pre",
	"dclk_vop",
	"dclk_vop0",
	"dclk_vop1",
	"dclk_vopb",
	"dclk_vopl",
};

static struct clk **loader_clocks __initdata;
static int __init rockchip_clocks_loader_protect(void)
{
	int nclocks = ARRAY_SIZE(loader_protect_clocks);
	struct clk *clk;
	int i;

	loader_clocks = kcalloc(nclocks, sizeof(void *), GFP_KERNEL);
	if (!loader_clocks)
		return -ENOMEM;

	for (i = 0; i < nclocks; i++) {
		clk = __clk_lookup(loader_protect_clocks[i]);

		if (clk) {
			loader_clocks[i] = clk;
			clk_prepare_enable(clk);
		}
	}

	return 0;
}
arch_initcall_sync(rockchip_clocks_loader_protect);

static int __init rockchip_clocks_loader_unprotect(void)
{
	int i;

	if (!loader_clocks)
		return -ENODEV;

	for (i = 0; i < ARRAY_SIZE(loader_protect_clocks); i++) {
		struct clk *clk = loader_clocks[i];

		if (clk)
			clk_disable_unprepare(clk);
	}
	kfree(loader_clocks);

	return 0;
}
late_initcall_sync(rockchip_clocks_loader_unprotect);
#endif
1234*4882a593Smuzhiyun #endif
1235