/* SPDX-License-Identifier: GPL-2.0-or-later */
/*
 * vDSO provided cache flush routines
 *
 * Copyright (C) 2004 Benjamin Herrenschmidt (benh@kernel.crashing.org),
 *                    IBM Corp.
 */
#include <asm/processor.h>
#include <asm/ppc_asm.h>
#include <asm/vdso.h>
#include <asm/vdso_datapage.h>
#include <asm/asm-offsets.h>

	.text

/*
 * Default "generic" version of __kernel_sync_dicache.
 *
 * void __kernel_sync_dicache(unsigned long start, unsigned long end)
 *
 * Flushes the data cache & invalidates the instruction cache for the
 * provided range [start, end]
 */
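/*
 * Rough C-level sketch of the sequence below (illustrative only; the
 * dcbst/icbi/sync/isync calls stand in for the instructions of the same
 * name, and the block sizes are read from the vDSO data page at run time):
 *
 *	addr = start & ~(dcache_block_size - 1);
 *	for (; addr < end; addr += dcache_block_size)
 *		dcbst(addr);
 *	sync();
 *	addr = start & ~(icache_block_size - 1);
 *	for (; addr < end; addr += icache_block_size)
 *		icbi(addr);
 *	isync();
 */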
V_FUNCTION_BEGIN(__kernel_sync_dicache)
  .cfi_startproc
	mflr	r12
  .cfi_register lr,r12
	get_datapage	r10, r0
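	/* get_datapage above clobbers LR, hence the save/restore via r12 */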
	mtlr	r12

	lwz	r7,CFG_DCACHE_BLOCKSZ(r10)	/* dcache block size in bytes */
	addi	r5,r7,-1
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5		/* ensure we get enough */
	lwz	r9,CFG_DCACHE_LOGBLOCKSZ(r10)	/* log2 of dcache block size */
	srd.	r8,r8,r9		/* compute line count */
	crclr	cr0*4+so		/* clear the error bit (cr0.SO), syscall style */
	beqlr				/* nothing to do? */
	mtctr	r8
1:	dcbst	0,r6			/* push the data block to memory */
	add	r6,r6,r7
	bdnz	1b
	sync				/* order the dcbst stores before the icbi loop */

/* Now invalidate the instruction cache */

	lwz	r7,CFG_ICACHE_BLOCKSZ(r10)	/* icache block size in bytes */
	addi	r5,r7,-1
	andc	r6,r3,r5		/* round low to line bdy */
	subf	r8,r6,r4		/* compute length */
	add	r8,r8,r5		/* ensure we get enough */
	lwz	r9,CFG_ICACHE_LOGBLOCKSZ(r10)	/* log2 of icache block size */
	srd.	r8,r8,r9		/* compute line count */
	crclr	cr0*4+so		/* clear the error bit (cr0.SO), syscall style */
	beqlr				/* nothing to do? */
	mtctr	r8
2:	icbi	0,r6			/* invalidate the instruction block */
	add	r6,r6,r7
	bdnz	2b
	isync				/* toss any prefetched instructions */
	li	r3,0			/* return success */
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache)
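
/*
 * Illustrative userspace call sequence (hypothetical helper name; the
 * vDSO is normally located through the AT_SYSINFO_EHDR auxv entry and
 * the symbol resolved from its dynamic symbol table):
 *
 *	void (*sync_dicache)(unsigned long, unsigned long) =
 *		lookup_vdso_symbol("__kernel_sync_dicache");
 *	sync_dicache((unsigned long)code, (unsigned long)code + len);
 */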


/*
 * POWER5 version of __kernel_sync_dicache
 */
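/*
 * Used in place of the generic routine above when the CPU keeps the
 * instruction cache coherent with the data cache (CPU_FTR_COHERENT_ICACHE,
 * e.g. POWER5); no per-line dcbst/icbi walk is needed, so a sync/isync
 * pair suffices.
 */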
V_FUNCTION_BEGIN(__kernel_sync_dicache_p5)
  .cfi_startproc
	crclr	cr0*4+so
	sync
	isync
	li	r3,0
	blr
  .cfi_endproc
V_FUNCTION_END(__kernel_sync_dicache_p5)
