Lines Matching refs:cur (every line in the xe_res_cursor helpers that references the cursor argument "cur": xe_res_first(), __xe_res_sg_next(), __xe_res_dma_next(), xe_res_first_sg(), xe_res_first_dma(), xe_res_next(), xe_res_dma() and xe_res_is_vram())

92 				struct xe_res_cursor *cur)  in xe_res_first()  argument
94 cur->sgl = NULL; in xe_res_first()
95 cur->dma_addr = NULL; in xe_res_first()
101 cur->mem_type = res->mem_type; in xe_res_first()
103 switch (cur->mem_type) { in xe_res_first()
128 cur->mm = mm; in xe_res_first()
129 cur->start = drm_buddy_block_offset(block) + start; in xe_res_first()
130 cur->size = min(drm_buddy_block_size(mm, block) - start, in xe_res_first()
132 cur->remaining = size; in xe_res_first()
133 cur->node = block; in xe_res_first()
143 cur->start = start; in xe_res_first()
144 cur->size = size; in xe_res_first()
145 cur->remaining = size; in xe_res_first()
146 cur->node = NULL; in xe_res_first()
147 cur->mem_type = XE_PL_TT; in xe_res_first()
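
The xe_res_first() lines above show the cursor being primed either from the drm_buddy blocks backing a VRAM/stolen resource (the switch on cur->mem_type) or, on the fallback path, as one contiguous XE_PL_TT range. A minimal usage sketch follows; the walk_resource() name and the pr_debug() reporting are illustrative rather than taken from the listing, and it assumes xe_res_cursor.h and its dependencies are included.

/* Sketch only: walk a ttm_resource in contiguous chunks using the cursor
 * primed by xe_res_first(). Everything except the xe_res_* helpers is
 * illustrative.
 */
static void walk_resource(struct ttm_resource *res, u64 offset, u64 length)
{
	struct xe_res_cursor cur;

	xe_res_first(res, offset, length, &cur);
	while (cur.remaining) {
		/* cur.size bytes starting at xe_res_dma(&cur) are contiguous
		 * within the current block or segment.
		 */
		pr_debug("chunk: addr=%llx size=%llx vram=%d\n",
			 xe_res_dma(&cur), cur.size, xe_res_is_vram(&cur));
		xe_res_next(&cur, cur.size);
	}
}
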
151 static inline void __xe_res_sg_next(struct xe_res_cursor *cur) in __xe_res_sg_next() argument
153 struct scatterlist *sgl = cur->sgl; in __xe_res_sg_next()
154 u64 start = cur->start; in __xe_res_sg_next()
162 cur->start = start; in __xe_res_sg_next()
163 cur->size = sg_dma_len(sgl) - start; in __xe_res_sg_next()
164 cur->sgl = sgl; in __xe_res_sg_next()
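
__xe_res_sg_next() re-anchors cur->start inside whichever scatterlist segment contains it and records how much of that segment is left in cur->size. The segment-hopping loop between the lines shown never mentions cur, so the listing skips it; a hedged reconstruction of that elided step, assuming the cursor walks DMA segments via sg_dma_len()/sg_next(), is:

/* Reconstruction sketch of the elided hop; it sits between the
 * "u64 start = cur->start;" line and the "cur->start = start;" line above.
 */
while (start >= sg_dma_len(sgl)) {
	start -= sg_dma_len(sgl);
	sgl = sg_next(sgl);
}
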
171 static inline void __xe_res_dma_next(struct xe_res_cursor *cur) in __xe_res_dma_next() argument
173 const struct drm_pagemap_addr *addr = cur->dma_addr; in __xe_res_dma_next()
174 u64 start = cur->start; in __xe_res_dma_next()
176 while (start >= cur->dma_seg_size) { in __xe_res_dma_next()
177 start -= cur->dma_seg_size; in __xe_res_dma_next()
179 cur->dma_seg_size = PAGE_SIZE << addr->order; in __xe_res_dma_next()
181 cur->dma_start = addr->addr; in __xe_res_dma_next()
184 while (cur->dma_seg_size - start < cur->remaining) { in __xe_res_dma_next()
185 if (cur->dma_start + cur->dma_seg_size != addr[1].addr || in __xe_res_dma_next()
189 cur->dma_seg_size += PAGE_SIZE << addr->order; in __xe_res_dma_next()
192 cur->dma_addr = addr; in __xe_res_dma_next()
193 cur->start = start; in __xe_res_dma_next()
194 cur->size = cur->dma_seg_size - start; in __xe_res_dma_next()
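
__xe_res_dma_next() does the same re-anchoring for an array of struct drm_pagemap_addr entries and, in the second loop, coalesces neighbouring entries that are physically contiguous and share the same interconnect protocol, so cur->size can cover several array elements at once. The addr++ advances and the break between the lines shown never touch cur and are therefore absent here; a hedged reconstruction of the full shape is:

/* Reconstruction sketch: only the addr++ steps and the break are missing
 * from the listing above.
 */
while (start >= cur->dma_seg_size) {
	start -= cur->dma_seg_size;
	addr++;
	cur->dma_seg_size = PAGE_SIZE << addr->order;
}
cur->dma_start = addr->addr;

/* Coalesce entries that are contiguous and share addr->proto. */
while (cur->dma_seg_size - start < cur->remaining) {
	if (cur->dma_start + cur->dma_seg_size != addr[1].addr ||
	    addr->proto != addr[1].proto)
		break;
	addr++;
	cur->dma_seg_size += PAGE_SIZE << addr->order;
}
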
209 struct xe_res_cursor *cur) in xe_res_first_sg() argument
212 cur->node = NULL; in xe_res_first_sg()
213 cur->start = start; in xe_res_first_sg()
214 cur->remaining = size; in xe_res_first_sg()
215 cur->size = 0; in xe_res_first_sg()
216 cur->dma_addr = NULL; in xe_res_first_sg()
217 cur->sgl = sg->sgl; in xe_res_first_sg()
218 cur->mem_type = XE_PL_TT; in xe_res_first_sg()
219 __xe_res_sg_next(cur); in xe_res_first_sg()
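
xe_res_first_sg() points the cursor at a scatter-gather table (cur->sgl = sg->sgl) instead of a ttm_resource and lets __xe_res_sg_next() locate the segment containing @start. A small usage sketch, assuming a DMA-mapped struct sg_table; the helper name and parameters are illustrative.

/* Sketch: DMA address backing byte "offset" of a mapped sg_table. */
static u64 sgt_dma_addr_at(struct sg_table *sgt, u64 offset, u64 len)
{
	struct xe_res_cursor cur;

	xe_res_first_sg(sgt, offset, len, &cur);
	return xe_res_dma(&cur); /* sg_dma_address(cur.sgl) + cur.start */
}
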
234 struct xe_res_cursor *cur) in xe_res_first_dma() argument
240 cur->node = NULL; in xe_res_first_dma()
241 cur->start = start; in xe_res_first_dma()
242 cur->remaining = size; in xe_res_first_dma()
243 cur->dma_seg_size = PAGE_SIZE << dma_addr->order; in xe_res_first_dma()
244 cur->dma_start = 0; in xe_res_first_dma()
245 cur->size = 0; in xe_res_first_dma()
246 cur->dma_addr = dma_addr; in xe_res_first_dma()
247 __xe_res_dma_next(cur); in xe_res_first_dma()
248 cur->sgl = NULL; in xe_res_first_dma()
249 cur->mem_type = XE_PL_TT; in xe_res_first_dma()
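
xe_res_first_dma() is the equivalent entry point for a struct drm_pagemap_addr array: cur->dma_seg_size is seeded from the first entry's page order and __xe_res_dma_next() then positions and coalesces. A usage sketch; the addrs/npages inputs are assumptions (order-0 entries, so the total size is npages << PAGE_SHIFT), not taken from the listing.

/* Sketch: visit every coalesced DMA segment of a drm_pagemap_addr array,
 * assuming order-0 entries.
 */
static void walk_pagemap_addrs(struct drm_pagemap_addr *addrs,
			       unsigned long npages)
{
	u64 size = (u64)npages << PAGE_SHIFT;
	struct xe_res_cursor cur;

	xe_res_first_dma(addrs, 0, size, &cur);
	while (cur.remaining) {
		pr_debug("segment: %llx + %llx\n", xe_res_dma(&cur), cur.size);
		xe_res_next(&cur, cur.size);
	}
}
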
260 static inline void xe_res_next(struct xe_res_cursor *cur, u64 size) in xe_res_next() argument
266 XE_WARN_ON(size > cur->remaining); in xe_res_next()
268 cur->remaining -= size; in xe_res_next()
269 if (!cur->remaining) in xe_res_next()
272 if (cur->size > size) { in xe_res_next()
273 cur->size -= size; in xe_res_next()
274 cur->start += size; in xe_res_next()
278 if (cur->dma_addr) { in xe_res_next()
279 cur->start += size; in xe_res_next()
280 __xe_res_dma_next(cur); in xe_res_next()
284 if (cur->sgl) { in xe_res_next()
285 cur->start += size; in xe_res_next()
286 __xe_res_sg_next(cur); in xe_res_next()
290 switch (cur->mem_type) { in xe_res_next()
294 start = size - cur->size; in xe_res_next()
295 block = cur->node; in xe_res_next()
301 while (start >= drm_buddy_block_size(cur->mm, block)) { in xe_res_next()
302 start -= drm_buddy_block_size(cur->mm, block); in xe_res_next()
308 cur->start = drm_buddy_block_offset(block) + start; in xe_res_next()
309 cur->size = min(drm_buddy_block_size(cur->mm, block) - start, in xe_res_next()
310 cur->remaining); in xe_res_next()
311 cur->node = block; in xe_res_next()
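
xe_res_next() first handles the cheap case where the step stays inside the current chunk, then re-walks through the DMA-array or scatterlist helpers, and only for stolen/VRAM placements hops along the drm_buddy block list. The hop itself never references cur, so it is not listed; a hedged reconstruction of that step, which appears once after "block = cur->node;" and again inside the while loop above, is:

/* Reconstruction sketch of the elided hop to the next buddy block. */
next = block->link.next;
block = list_entry(next, struct drm_buddy_block, link);
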
323 static inline u64 xe_res_dma(const struct xe_res_cursor *cur) in xe_res_dma() argument
325 if (cur->dma_addr) in xe_res_dma()
326 return cur->dma_start + cur->start; in xe_res_dma()
327 else if (cur->sgl) in xe_res_dma()
328 return sg_dma_address(cur->sgl) + cur->start; in xe_res_dma()
330 return cur->start; in xe_res_dma()
340 static inline bool xe_res_is_vram(const struct xe_res_cursor *cur) in xe_res_is_vram() argument
342 if (cur->dma_addr) in xe_res_is_vram()
343 return cur->dma_addr->proto == XE_INTERCONNECT_VRAM; in xe_res_is_vram()
345 switch (cur->mem_type) { in xe_res_is_vram()
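
The last two helpers are plain accessors: xe_res_dma() turns the cursor position into a DMA address (or a raw VRAM/stolen offset when neither sgl nor dma_addr is set), and xe_res_is_vram() reports whether the current position is in VRAM, either from the drm_pagemap_addr protocol or from the placement's mem_type. A combined usage sketch; the encoding and the ILLUSTRATIVE_LM_BIT flag are invented for illustration only.

/* Sketch: derive an address plus a hypothetical "local memory" flag for
 * the cursor's current position. ILLUSTRATIVE_LM_BIT is made up.
 */
#define ILLUSTRATIVE_LM_BIT	BIT_ULL(11)

static u64 encode_entry(const struct xe_res_cursor *cur)
{
	u64 entry = xe_res_dma(cur);

	if (xe_res_is_vram(cur))
		entry |= ILLUSTRATIVE_LM_BIT;

	return entry;
}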