xref: /OK3568_Linux_fs/kernel/drivers/gpu/drm/i915/gt/intel_sseu.h (revision 4882a59341e53eb6f0b4789bf948001014eff981)
/*
 * SPDX-License-Identifier: MIT
 *
 * Copyright © 2019 Intel Corporation
 */

#ifndef __INTEL_SSEU_H__
#define __INTEL_SSEU_H__

#include <linux/types.h>
#include <linux/kernel.h>

#include "i915_gem.h"

struct drm_i915_private;
struct intel_gt;
struct drm_printer;

#define GEN_MAX_SLICES		(6) /* CNL upper bound */
#define GEN_MAX_SUBSLICES	(8) /* ICL upper bound */
#define GEN_SSEU_STRIDE(max_entries) DIV_ROUND_UP(max_entries, BITS_PER_BYTE)
#define GEN_MAX_SUBSLICE_STRIDE GEN_SSEU_STRIDE(GEN_MAX_SUBSLICES)
#define GEN_MAX_EUS		(16) /* TGL upper bound */
#define GEN_MAX_EU_STRIDE GEN_SSEU_STRIDE(GEN_MAX_EUS)

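/*
 * For illustration: GEN_SSEU_STRIDE() is the number of bytes needed to
 * hold a bitmask of max_entries bits, so GEN_MAX_SUBSLICE_STRIDE works
 * out to DIV_ROUND_UP(8, 8) == 1 byte per slice and GEN_MAX_EU_STRIDE
 * to DIV_ROUND_UP(16, 8) == 2 bytes per subslice.
 */
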
struct sseu_dev_info {
	u8 slice_mask;
	u8 subslice_mask[GEN_MAX_SLICES * GEN_MAX_SUBSLICE_STRIDE];
	u8 eu_mask[GEN_MAX_SLICES * GEN_MAX_SUBSLICES * GEN_MAX_EU_STRIDE];
	u16 eu_total;
	u8 eu_per_subslice;
	u8 min_eu_in_pool;
	/* For each slice, which subslice(s) has(have) 7 EUs (bitfield)? */
	u8 subslice_7eu[3];
	u8 has_slice_pg:1;
	u8 has_subslice_pg:1;
	u8 has_eu_pg:1;

	/* Topology fields */
	u8 max_slices;
	u8 max_subslices;
	u8 max_eus_per_subslice;

	u8 ss_stride;
	u8 eu_stride;
};

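/*
 * Layout note, as a sketch: subslice_mask is a flat array of per-slice
 * bitmasks, each ss_stride bytes wide, so the byte covering subslice ss
 * of slice s is subslice_mask[s * ss_stride + ss / BITS_PER_BYTE] (see
 * intel_sseu_has_subslice() below). eu_mask is assumed to follow the
 * same scheme per (slice, subslice) pair with eu_stride-byte masks, as
 * handled by the accessors in intel_sseu.c.
 */
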
/*
 * Powergating configuration for a particular (context,engine).
 */
struct intel_sseu {
	u8 slice_mask;
	u8 subslice_mask;
	u8 min_eus_per_subslice;
	u8 max_eus_per_subslice;
};

static inline struct intel_sseu
intel_sseu_from_device_info(const struct sseu_dev_info *sseu)
{
	struct intel_sseu value = {
		.slice_mask = sseu->slice_mask,
		.subslice_mask = sseu->subslice_mask[0],
		.min_eus_per_subslice = sseu->max_eus_per_subslice,
		.max_eus_per_subslice = sseu->max_eus_per_subslice,
	};

	return value;
}

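/*
 * Example use, as a sketch (the gt->info.sseu path is illustrative):
 *
 *	struct intel_sseu ctx_sseu = intel_sseu_from_device_info(&gt->info.sseu);
 *
 * Only the first subslice mask byte is carried over into this compact
 * per-context form, and min_eus_per_subslice is seeded from the
 * device's max_eus_per_subslice, i.e. the default request asks for the
 * full EU complement.
 */
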
static inline bool
intel_sseu_has_subslice(const struct sseu_dev_info *sseu, int slice,
			int subslice)
{
	u8 mask;
	int ss_idx = subslice / BITS_PER_BYTE;

	GEM_BUG_ON(ss_idx >= sseu->ss_stride);

	mask = sseu->subslice_mask[slice * sseu->ss_stride + ss_idx];

	return mask & BIT(subslice % BITS_PER_BYTE);
}

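/*
 * Example, as a sketch: guarding per-subslice programming on the fused
 * topology, e.g.
 *
 *	if (intel_sseu_has_subslice(sseu, slice, ss))
 *		... program that subslice ...
 *
 * The subslice index is split into a byte offset within the slice's
 * ss_stride-byte mask and a bit within that byte.
 */
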
void intel_sseu_set_info(struct sseu_dev_info *sseu, u8 max_slices,
			 u8 max_subslices, u8 max_eus_per_subslice);

unsigned int
intel_sseu_subslice_total(const struct sseu_dev_info *sseu);

unsigned int
intel_sseu_subslices_per_slice(const struct sseu_dev_info *sseu, u8 slice);

u32  intel_sseu_get_subslices(const struct sseu_dev_info *sseu, u8 slice);

void intel_sseu_set_subslices(struct sseu_dev_info *sseu, int slice,
			      u32 ss_mask);

void intel_sseu_info_init(struct intel_gt *gt);

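/*
 * intel_sseu_make_rpcs() is understood to encode the requested
 * slice/subslice/EU configuration into the u32 value written to the
 * render power clock state (RPCS) register in a context's register
 * state; see the implementation in intel_sseu.c.
 */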
u32 intel_sseu_make_rpcs(struct intel_gt *gt,
			 const struct intel_sseu *req_sseu);

void intel_sseu_dump(const struct sseu_dev_info *sseu, struct drm_printer *p);
void intel_sseu_print_topology(const struct sseu_dev_info *sseu,
			       struct drm_printer *p);

#endif /* __INTEL_SSEU_H__ */