/*
 * Copyright © 2006 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
 * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
 * SOFTWARE.
 *
 * Authors:
 *    Eric Anholt <eric@anholt.net>
 *    Michel Dänzer <michel@tungstengraphics.com>
 *
 */

#ifdef HAVE_DIX_CONFIG_H
#include <dix-config.h>
#endif

#include <string.h>

#include "exa_priv.h"
#include "exa.h"

#if DEBUG_MIGRATE
#define DBG_MIGRATE(a) ErrorF a
#else
#define DBG_MIGRATE(a)
#endif

/**
 * The fallback path for UTS/DFS failing is to just memcpy. exaCopyDirtyToSys
 * and exaCopyDirtyToFb both need to do this loop.
 */
static void
exaMemcpyBox(PixmapPtr pPixmap, BoxPtr pbox, CARD8 *src, int src_pitch,
             CARD8 *dst, int dst_pitch)
{
    int i, cpp = pPixmap->drawable.bitsPerPixel / 8;
    int bytes = (pbox->x2 - pbox->x1) * cpp;

    src += pbox->y1 * src_pitch + pbox->x1 * cpp;
    dst += pbox->y1 * dst_pitch + pbox->x1 * cpp;

    for (i = pbox->y2 - pbox->y1; i; i--) {
        memcpy(dst, src, bytes);
        src += src_pitch;
        dst += dst_pitch;
    }
}

/**
 * Returns TRUE if the pixmap is dirty (has been modified in its current
 * location compared to the other), or lacks a private for tracking
 * dirtiness.
 */
static Bool
exaPixmapIsDirty(PixmapPtr pPix)
{
    ExaPixmapPriv(pPix);

    if (pExaPixmap == NULL)
        EXA_FatalErrorDebugWithRet(("EXA bug: exaPixmapIsDirty was called on a non-exa pixmap.\n"), TRUE);

    if (!pExaPixmap->pDamage)
        return FALSE;

    return RegionNotEmpty(DamageRegion(pExaPixmap->pDamage)) ||
        !RegionEqual(&pExaPixmap->validSys, &pExaPixmap->validFB);
}

/**
 * Returns TRUE if the pixmap is either pinned in FB, or has a sufficient score
 * to be considered "should be in framebuffer". That's just anything that has
 * had more acceleration than fallbacks, or has no score yet.
 *
 * Only valid if using a migration scheme that tracks score.
 */
static Bool
exaPixmapShouldBeInFB(PixmapPtr pPix)
{
    ExaPixmapPriv(pPix);

    if (exaPixmapIsPinned(pPix))
        return TRUE;

    return pExaPixmap->score >= 0;
}

/**
 * If the pixmap is currently dirty, this copies at least the dirty area from
 * FB to system or vice versa. Both areas must be allocated.
 */
static void
exaCopyDirty(ExaMigrationPtr migrate, RegionPtr pValidDst, RegionPtr pValidSrc,
             Bool (*transfer) (PixmapPtr pPix, int x, int y, int w, int h,
                               char *sys, int sys_pitch), int fallback_index,
             void (*sync) (ScreenPtr pScreen))
{
    PixmapPtr pPixmap = migrate->pPix;

    ExaPixmapPriv(pPixmap);
    RegionPtr damage = DamageRegion(pExaPixmap->pDamage);
    RegionRec CopyReg;
    Bool save_use_gpu_copy;
    int save_pitch;
    BoxPtr pBox;
    int nbox;
    Bool access_prepared = FALSE;
    Bool need_sync = FALSE;

    /* Damaged bits are valid in current copy but invalid in other one */
    if (pExaPixmap->use_gpu_copy) {
        RegionUnion(&pExaPixmap->validFB, &pExaPixmap->validFB, damage);
        RegionSubtract(&pExaPixmap->validSys, &pExaPixmap->validSys, damage);
    }
    else {
        RegionUnion(&pExaPixmap->validSys, &pExaPixmap->validSys, damage);
        RegionSubtract(&pExaPixmap->validFB, &pExaPixmap->validFB, damage);
    }

    RegionEmpty(damage);

    /* Copy bits valid in source but not in destination */
    RegionNull(&CopyReg);
    RegionSubtract(&CopyReg, pValidSrc, pValidDst);

    if (migrate->as_dst) {
        ExaScreenPriv(pPixmap->drawable.pScreen);

        /* XXX: The pending damage region will be marked as damaged after the
         * operation, so it should serve as an upper bound for the region that
         * needs to be synchronized for the operation. Unfortunately, this
         * causes corruption in some cases, e.g. when starting compiz. See
         * https://bugs.freedesktop.org/show_bug.cgi?id=12916 .
         */
        if (pExaScr->optimize_migration) {
            RegionPtr pending_damage = DamagePendingRegion(pExaPixmap->pDamage);

#if DEBUG_MIGRATE
            if (RegionNil(pending_damage)) {
                static Bool firsttime = TRUE;

                if (firsttime) {
                    ErrorF("%s: Pending damage region empty!\n", __func__);
                    firsttime = FALSE;
                }
            }
#endif

            /* Try to prevent destination valid region from growing too many
             * rects by filling it up to the extents of the union of the
             * destination valid region and the pending damage region.
             */
            if (RegionNumRects(pValidDst) > 10) {
                BoxRec box;
                BoxPtr pValidExt, pDamageExt;
                RegionRec closure;

                pValidExt = RegionExtents(pValidDst);
                pDamageExt = RegionExtents(pending_damage);

                box.x1 = min(pValidExt->x1, pDamageExt->x1);
                box.y1 = min(pValidExt->y1, pDamageExt->y1);
                box.x2 = max(pValidExt->x2, pDamageExt->x2);
                box.y2 = max(pValidExt->y2, pDamageExt->y2);

                RegionInit(&closure, &box, 0);
                RegionIntersect(&CopyReg, &CopyReg, &closure);
            }
            else
                RegionIntersect(&CopyReg, &CopyReg, pending_damage);
        }

        /* The caller may provide a region to be subtracted from the calculated
         * dirty region. This is to avoid migration of bits that don't
         * contribute to the result of the operation.
         */
        if (migrate->pReg)
            RegionSubtract(&CopyReg, &CopyReg, migrate->pReg);
    }
    else {
        /* The caller may restrict the region to be migrated for source pixmaps
         * to what's relevant for the operation.
         */
        if (migrate->pReg)
            RegionIntersect(&CopyReg, &CopyReg, migrate->pReg);
    }

    pBox = RegionRects(&CopyReg);
    nbox = RegionNumRects(&CopyReg);

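    /* Present the GPU copy to both the driver transfer hooks and the fallback
     * path below: use_gpu_copy selects the FB copy and devKind must match its
     * pitch while the transfers are in progress.
     */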
    save_use_gpu_copy = pExaPixmap->use_gpu_copy;
    save_pitch = pPixmap->devKind;
    pExaPixmap->use_gpu_copy = TRUE;
    pPixmap->devKind = pExaPixmap->fb_pitch;

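    /* Clip each box to the pixmap, try the driver transfer hook first, and
     * fall back to mapped access plus memcpy if the hook is absent or fails.
     */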
    while (nbox--) {
        pBox->x1 = max(pBox->x1, 0);
        pBox->y1 = max(pBox->y1, 0);
        pBox->x2 = min(pBox->x2, pPixmap->drawable.width);
        pBox->y2 = min(pBox->y2, pPixmap->drawable.height);

        if (pBox->x1 >= pBox->x2 || pBox->y1 >= pBox->y2)
            continue;

        if (!transfer || !transfer(pPixmap,
                                   pBox->x1, pBox->y1,
                                   pBox->x2 - pBox->x1,
                                   pBox->y2 - pBox->y1,
                                   (char *) (pExaPixmap->sys_ptr
                                             + pBox->y1 * pExaPixmap->sys_pitch
                                             + pBox->x1 *
                                             pPixmap->drawable.bitsPerPixel /
                                             8), pExaPixmap->sys_pitch)) {
            if (!access_prepared) {
                ExaDoPrepareAccess(pPixmap, fallback_index);
                access_prepared = TRUE;
            }
            if (fallback_index == EXA_PREPARE_DEST) {
                exaMemcpyBox(pPixmap, pBox,
                             pExaPixmap->sys_ptr, pExaPixmap->sys_pitch,
                             pPixmap->devPrivate.ptr, pPixmap->devKind);
            }
            else {
                exaMemcpyBox(pPixmap, pBox,
                             pPixmap->devPrivate.ptr, pPixmap->devKind,
                             pExaPixmap->sys_ptr, pExaPixmap->sys_pitch);
            }
        }
        else
            need_sync = TRUE;

        pBox++;
    }

    pExaPixmap->use_gpu_copy = save_use_gpu_copy;
    pPixmap->devKind = save_pitch;

    /* Try to prevent source valid region from growing too many rects by
     * removing parts of it which are also in the destination valid region.
     * Removing anything beyond that would lead to data loss.
     */
    if (RegionNumRects(pValidSrc) > 20)
        RegionSubtract(pValidSrc, pValidSrc, pValidDst);

    /* The copied bits are now valid in destination */
    RegionUnion(pValidDst, pValidDst, &CopyReg);

    RegionUninit(&CopyReg);

    if (access_prepared)
        exaFinishAccess(&pPixmap->drawable, fallback_index);
    else if (need_sync && sync)
        sync(pPixmap->drawable.pScreen);
}

/**
 * If the pixmap is currently dirty, this copies at least the dirty area from
 * the framebuffer memory copy to the system memory copy. Both areas must be
 * allocated.
 */
void
exaCopyDirtyToSys(ExaMigrationPtr migrate)
{
    PixmapPtr pPixmap = migrate->pPix;

    ExaScreenPriv(pPixmap->drawable.pScreen);
    ExaPixmapPriv(pPixmap);

    exaCopyDirty(migrate, &pExaPixmap->validSys, &pExaPixmap->validFB,
                 pExaScr->info->DownloadFromScreen, EXA_PREPARE_SRC,
                 exaWaitSync);
}

/**
 * If the pixmap is currently dirty, this copies at least the dirty area from
 * the system memory copy to the framebuffer memory copy. Both areas must be
 * allocated.
 */
void
exaCopyDirtyToFb(ExaMigrationPtr migrate)
{
    PixmapPtr pPixmap = migrate->pPix;

    ExaScreenPriv(pPixmap->drawable.pScreen);
    ExaPixmapPriv(pPixmap);

    exaCopyDirty(migrate, &pExaPixmap->validFB, &pExaPixmap->validSys,
                 pExaScr->info->UploadToScreen, EXA_PREPARE_DEST, NULL);
}

/**
 * Allocates a framebuffer copy of the pixmap if necessary, and then copies
 * any necessary pixmap data into the framebuffer copy and points the pixmap at
 * it.
 *
 * Note that when first allocated, a pixmap will have a FALSE dirty flag.
 * This is intentional because pixmap data starts out undefined. So if we move
 * it in due to the first operation against it being accelerated, it will have
 * undefined framebuffer contents that we didn't have to upload. If we do
 * moveouts (and moveins) after the first movein, then we will only have to copy
 * back and forth if the pixmap was written to after the last synchronization of
 * the two copies. Then, at exaPixmapSave (when the framebuffer copy goes away)
 * we mark the pixmap dirty, so that the next exaMoveInPixmap will actually move
 * all the data, since it's almost surely all valid now.
 */
static void
exaDoMoveInPixmap(ExaMigrationPtr migrate)
{
    PixmapPtr pPixmap = migrate->pPix;
    ScreenPtr pScreen = pPixmap->drawable.pScreen;

    ExaScreenPriv(pScreen);
    ExaPixmapPriv(pPixmap);

    /* If we're VT-switched away, no touching card memory allowed. */
    if (pExaScr->swappedOut)
        return;

    /* If we're not allowed to move, then fail. */
    if (exaPixmapIsPinned(pPixmap))
        return;

    /* Don't migrate in pixmaps which are less than 8bpp. This avoids a lot of
     * fragility in EXA, and <8bpp is probably not used enough any more to care
     * (at least, not in accelerated paths).
     */
    if (pPixmap->drawable.bitsPerPixel < 8)
        return;

    if (pExaPixmap->accel_blocked)
        return;

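    /* Make sure the pixmap has an offscreen area. exaPixmapSave is registered
     * as the save callback, so if the offscreen allocator later evicts this
     * area, the pixmap's data is copied back out first.
     */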
    if (pExaPixmap->area == NULL) {
        pExaPixmap->area =
            exaOffscreenAlloc(pScreen, pExaPixmap->fb_size,
                              pExaScr->info->pixmapOffsetAlign, FALSE,
                              exaPixmapSave, (void *) pPixmap);
        if (pExaPixmap->area == NULL)
            return;

        pExaPixmap->fb_ptr = (CARD8 *) pExaScr->info->memoryBase +
            pExaPixmap->area->offset;
    }

    exaCopyDirtyToFb(migrate);

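    /* If the GPU copy was already the active one, the dirty data has been
     * copied up and there is nothing left to switch over.
     */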
    if (exaPixmapHasGpuCopy(pPixmap))
        return;

    DBG_MIGRATE(("-> %p (0x%x) (%dx%d) (%c)\n", pPixmap,
                 (ExaGetPixmapPriv(pPixmap)->area ?
                  ExaGetPixmapPriv(pPixmap)->area->offset : 0),
                 pPixmap->drawable.width,
                 pPixmap->drawable.height,
                 exaPixmapIsDirty(pPixmap) ? 'd' : 'c'));

    pExaPixmap->use_gpu_copy = TRUE;

    pPixmap->devKind = pExaPixmap->fb_pitch;
    pPixmap->drawable.serialNumber = NEXT_SERIAL_NUMBER;
}

void
exaMoveInPixmap_classic(PixmapPtr pPixmap)
{
    static ExaMigrationRec migrate = {.as_dst = FALSE,.as_src = TRUE,
        .pReg = NULL
    };

    migrate.pPix = pPixmap;
    exaDoMoveInPixmap(&migrate);
}

/**
 * Switches the current active location of the pixmap to system memory, copying
 * updated data out if necessary.
 */
static void
exaDoMoveOutPixmap(ExaMigrationPtr migrate)
{
    PixmapPtr pPixmap = migrate->pPix;

    ExaPixmapPriv(pPixmap);

    if (!pExaPixmap->area || exaPixmapIsPinned(pPixmap))
        return;

    exaCopyDirtyToSys(migrate);

    if (exaPixmapHasGpuCopy(pPixmap)) {

        DBG_MIGRATE(("<- %p (%p) (%dx%d) (%c)\n", pPixmap,
                     (void *) (ExaGetPixmapPriv(pPixmap)->area ?
                               ExaGetPixmapPriv(pPixmap)->area->offset : 0),
                     pPixmap->drawable.width,
                     pPixmap->drawable.height,
                     exaPixmapIsDirty(pPixmap) ? 'd' : 'c'));

        pExaPixmap->use_gpu_copy = FALSE;

        pPixmap->devKind = pExaPixmap->sys_pitch;
        pPixmap->drawable.serialNumber = NEXT_SERIAL_NUMBER;
    }
}

void
exaMoveOutPixmap_classic(PixmapPtr pPixmap)
{
    static ExaMigrationRec migrate = {.as_dst = FALSE,.as_src = TRUE,
        .pReg = NULL
    };

    migrate.pPix = pPixmap;
    exaDoMoveOutPixmap(&migrate);
}

/**
 * Copies out important pixmap data and removes references to framebuffer area.
 * Called when the memory manager decides it's time to kick the pixmap out of
 * framebuffer entirely.
 */
void
exaPixmapSave(ScreenPtr pScreen, ExaOffscreenArea * area)
{
    PixmapPtr pPixmap = area->privData;

    ExaPixmapPriv(pPixmap);

    exaMoveOutPixmap(pPixmap);

    pExaPixmap->fb_ptr = NULL;
    pExaPixmap->area = NULL;

    /* Mark all FB bits as invalid, so all valid system bits get copied to FB
     * next time */
    RegionEmpty(&pExaPixmap->validFB);
}

/**
 * For the "greedy" migration scheme, pushes the pixmap toward being located in
 * framebuffer memory.
 */
static void
exaMigrateTowardFb(ExaMigrationPtr migrate)
{
    PixmapPtr pPixmap = migrate->pPix;

    ExaPixmapPriv(pPixmap);

    if (pExaPixmap->score == EXA_PIXMAP_SCORE_PINNED) {
        DBG_MIGRATE(("UseScreen: not migrating pinned pixmap %p\n",
                     (void *) pPixmap));
        return;
    }

    DBG_MIGRATE(("UseScreen %p score %d\n",
                 (void *) pPixmap, pExaPixmap->score));

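    /* A pixmap with no history yet is moved in right away and given a neutral
     * score; otherwise accelerated use bumps the score, and the pixmap is
     * moved in once it reaches EXA_PIXMAP_SCORE_MOVE_IN.
     */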
    if (pExaPixmap->score == EXA_PIXMAP_SCORE_INIT) {
        exaDoMoveInPixmap(migrate);
        pExaPixmap->score = 0;
    }

    if (pExaPixmap->score < EXA_PIXMAP_SCORE_MAX)
        pExaPixmap->score++;

    if (pExaPixmap->score >= EXA_PIXMAP_SCORE_MOVE_IN &&
        !exaPixmapHasGpuCopy(pPixmap)) {
        exaDoMoveInPixmap(migrate);
    }

    if (exaPixmapHasGpuCopy(pPixmap)) {
        exaCopyDirtyToFb(migrate);
        ExaOffscreenMarkUsed(pPixmap);
    }
    else
        exaCopyDirtyToSys(migrate);
}

/**
 * For the "greedy" migration scheme, pushes the pixmap toward being located in
 * system memory.
 */
static void
exaMigrateTowardSys(ExaMigrationPtr migrate)
{
    PixmapPtr pPixmap = migrate->pPix;

    ExaPixmapPriv(pPixmap);

    DBG_MIGRATE(("UseMem: %p score %d\n", (void *) pPixmap,
                 pExaPixmap->score));

    if (pExaPixmap->score == EXA_PIXMAP_SCORE_PINNED)
        return;

    if (pExaPixmap->score == EXA_PIXMAP_SCORE_INIT)
        pExaPixmap->score = 0;

    if (pExaPixmap->score > EXA_PIXMAP_SCORE_MIN)
        pExaPixmap->score--;

    if (pExaPixmap->score <= EXA_PIXMAP_SCORE_MOVE_OUT && pExaPixmap->area)
        exaDoMoveOutPixmap(migrate);

    if (exaPixmapHasGpuCopy(pPixmap)) {
        exaCopyDirtyToFb(migrate);
        ExaOffscreenMarkUsed(pPixmap);
    }
    else
        exaCopyDirtyToSys(migrate);
}

/**
 * If the pixmap has both a framebuffer and system memory copy, this function
 * asserts that both of them are the same.
 */
static Bool
exaAssertNotDirty(PixmapPtr pPixmap)
{
    ExaPixmapPriv(pPixmap);
    CARD8 *dst, *src;
    RegionRec ValidReg;
    int dst_pitch, src_pitch, cpp, y, nbox, save_pitch;
    BoxPtr pBox;
    Bool ret = TRUE, save_use_gpu_copy;

    if (exaPixmapIsPinned(pPixmap) || pExaPixmap->area == NULL)
        return ret;

    RegionNull(&ValidReg);
    RegionIntersect(&ValidReg, &pExaPixmap->validFB, &pExaPixmap->validSys);
    nbox = RegionNumRects(&ValidReg);

    if (!nbox)
        goto out;

    pBox = RegionRects(&ValidReg);

    dst_pitch = pExaPixmap->sys_pitch;
    src_pitch = pExaPixmap->fb_pitch;
    cpp = pPixmap->drawable.bitsPerPixel / 8;

    save_use_gpu_copy = pExaPixmap->use_gpu_copy;
    save_pitch = pPixmap->devKind;
    pExaPixmap->use_gpu_copy = TRUE;
    pPixmap->devKind = pExaPixmap->fb_pitch;

    if (!ExaDoPrepareAccess(pPixmap, EXA_PREPARE_SRC))
        goto skip;

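    /* Compare the region that is valid in both copies row by row; any
     * mismatch means the pixmap was modified without being marked dirty,
     * so flag it as dirty and report failure.
     */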
    while (nbox--) {
        int rowbytes;

        pBox->x1 = max(pBox->x1, 0);
        pBox->y1 = max(pBox->y1, 0);
        pBox->x2 = min(pBox->x2, pPixmap->drawable.width);
        pBox->y2 = min(pBox->y2, pPixmap->drawable.height);

        if (pBox->x1 >= pBox->x2 || pBox->y1 >= pBox->y2)
            continue;

        rowbytes = (pBox->x2 - pBox->x1) * cpp;
        src =
            (CARD8 *) pPixmap->devPrivate.ptr + pBox->y1 * src_pitch +
            pBox->x1 * cpp;
        dst = pExaPixmap->sys_ptr + pBox->y1 * dst_pitch + pBox->x1 * cpp;

        for (y = pBox->y1; y < pBox->y2;
             y++, src += src_pitch, dst += dst_pitch) {
            if (memcmp(dst, src, rowbytes) != 0) {
                ret = FALSE;
                exaPixmapDirty(pPixmap, pBox->x1, pBox->y1, pBox->x2, pBox->y2);
                break;
            }
        }

        /* Advance to the next box of the valid region */
        pBox++;
    }

 skip:
    exaFinishAccess(&pPixmap->drawable, EXA_PREPARE_SRC);

    pExaPixmap->use_gpu_copy = save_use_gpu_copy;
    pPixmap->devKind = save_pitch;

 out:
    RegionUninit(&ValidReg);
    return ret;
}

/**
 * Performs migration of the pixmaps according to the operation information
 * provided in pixmaps and can_accel and the migration scheme chosen in the
 * config file.
 */
void
exaDoMigration_classic(ExaMigrationPtr pixmaps, int npixmaps, Bool can_accel)
{
    ScreenPtr pScreen = pixmaps[0].pPix->drawable.pScreen;

    ExaScreenPriv(pScreen);
    int i, j;

    /* If this debugging flag is set, check each pixmap for whether it is marked
     * as clean, and if so, actually check if that's the case. This should help
     * catch issues with failing to mark a drawable as dirty. While it will
     * catch them late (after the operation happened), it at least explains what
     * went wrong, and instrumenting the code to find what operation happened
     * to the pixmap last shouldn't be hard.
     */
    if (pExaScr->checkDirtyCorrectness) {
        for (i = 0; i < npixmaps; i++) {
            if (!exaPixmapIsDirty(pixmaps[i].pPix) &&
                !exaAssertNotDirty(pixmaps[i].pPix))
                ErrorF("%s: Pixmap %d dirty but not marked as such!\n",
                       __func__, i);
        }
    }
    /* If anything is pinned in system memory, we won't be able to
     * accelerate.
     */
    for (i = 0; i < npixmaps; i++) {
        if (exaPixmapIsPinned(pixmaps[i].pPix) &&
            !exaPixmapHasGpuCopy(pixmaps[i].pPix)) {
            EXA_FALLBACK(("Pixmap %p (%dx%d) pinned in sys\n", pixmaps[i].pPix,
                          pixmaps[i].pPix->drawable.width,
                          pixmaps[i].pPix->drawable.height));
            can_accel = FALSE;
            break;
        }
    }

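    /* Dispatch on the configured migration heuristic: "smart" weighs whether
     * a destination should be in FB before dirtying it, "greedy" steers each
     * pixmap by its accumulated score, and "always" simply tries to keep
     * everything involved in an accelerated operation in FB.
     */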
    if (pExaScr->migration == ExaMigrationSmart) {
        /* If we've got something as a destination that we shouldn't cause to
         * become newly dirtied, take the unaccelerated route.
         */
        for (i = 0; i < npixmaps; i++) {
            if (pixmaps[i].as_dst && !exaPixmapShouldBeInFB(pixmaps[i].pPix) &&
                !exaPixmapIsDirty(pixmaps[i].pPix)) {
                for (i = 0; i < npixmaps; i++) {
                    if (!exaPixmapIsDirty(pixmaps[i].pPix))
                        exaDoMoveOutPixmap(pixmaps + i);
                }
                return;
            }
        }

        /* If we aren't going to accelerate, then we migrate everybody toward
         * system memory, and kick out if it's free.
         */
        if (!can_accel) {
            for (i = 0; i < npixmaps; i++) {
                exaMigrateTowardSys(pixmaps + i);
                if (!exaPixmapIsDirty(pixmaps[i].pPix))
                    exaDoMoveOutPixmap(pixmaps + i);
            }
            return;
        }

        /* Finally, the acceleration path. Move them all in. */
        for (i = 0; i < npixmaps; i++) {
            exaMigrateTowardFb(pixmaps + i);
            exaDoMoveInPixmap(pixmaps + i);
        }
    }
    else if (pExaScr->migration == ExaMigrationGreedy) {
        /* If we can't accelerate, either because the driver can't or because one of
         * the pixmaps is pinned in system memory, then we migrate everybody toward
         * system memory.
         *
         * We also migrate toward system if all pixmaps involved are currently in
         * system memory -- this can mitigate thrashing when there are significantly
         * more pixmaps active than would fit in memory.
         *
         * If not, then we migrate toward FB so that hopefully acceleration can
         * happen.
         */
        if (!can_accel) {
            for (i = 0; i < npixmaps; i++)
                exaMigrateTowardSys(pixmaps + i);
            return;
        }

        for (i = 0; i < npixmaps; i++) {
            if (exaPixmapHasGpuCopy(pixmaps[i].pPix)) {
                /* Found one in FB, so move all to FB. */
                for (j = 0; j < npixmaps; j++)
                    exaMigrateTowardFb(pixmaps + j);
                return;
            }
        }

        /* Nobody's in FB, so move all away from FB. */
        for (i = 0; i < npixmaps; i++)
            exaMigrateTowardSys(pixmaps + i);
    }
    else if (pExaScr->migration == ExaMigrationAlways) {
        /* Always move the pixmaps out if we can't accelerate. If we can
         * accelerate, try to move them all in. If that fails, then move them
         * back out.
         */
        if (!can_accel) {
            for (i = 0; i < npixmaps; i++)
                exaDoMoveOutPixmap(pixmaps + i);
            return;
        }

        /* Now, try to move them all into FB */
        for (i = 0; i < npixmaps; i++) {
            exaDoMoveInPixmap(pixmaps + i);
        }

        /* If we couldn't fit everything in, abort */
        for (i = 0; i < npixmaps; i++) {
            if (!exaPixmapHasGpuCopy(pixmaps[i].pPix)) {
                return;
            }
        }

        /* Yay, everything has a gpu copy, mark memory as used */
        for (i = 0; i < npixmaps; i++) {
            ExaOffscreenMarkUsed(pixmaps[i].pPix);
        }
    }
}

void
exaPrepareAccessReg_classic(PixmapPtr pPixmap, int index, RegionPtr pReg)
{
    ExaMigrationRec pixmaps[1];

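    /* Describe how this single pixmap is about to be accessed, migrate it
     * accordingly (without requesting acceleration), and then prepare it for
     * CPU access.
     */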
    if (index == EXA_PREPARE_DEST || index == EXA_PREPARE_AUX_DEST) {
        pixmaps[0].as_dst = TRUE;
        pixmaps[0].as_src = FALSE;
    }
    else {
        pixmaps[0].as_dst = FALSE;
        pixmaps[0].as_src = TRUE;
    }
    pixmaps[0].pPix = pPixmap;
    pixmaps[0].pReg = pReg;

    exaDoMigration(pixmaps, 1, FALSE);

    (void) ExaDoPrepareAccess(pPixmap, index);
}