
Searched refs:L1_CACHE_BYTES (Results 1 – 25 of 166) sorted by relevance


/linux/arch/sh/mm/
flush-sh4.c
19 v = aligned_start & ~(L1_CACHE_BYTES-1); in sh4__flush_wback_region()
20 end = (aligned_start + size + L1_CACHE_BYTES-1) in sh4__flush_wback_region()
21 & ~(L1_CACHE_BYTES-1); in sh4__flush_wback_region()
22 cnt = (end - v) / L1_CACHE_BYTES; in sh4__flush_wback_region()
25 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
26 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
27 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
28 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
29 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
30 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
[all...]
cache-sh2a.c
57 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2a__flush_wback_region()
58 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2a__flush_wback_region()
59 & ~(L1_CACHE_BYTES-1); in sh2a__flush_wback_region()
70 for (v = begin; v < end; v += L1_CACHE_BYTES) { in sh2a__flush_wback_region()
78 for (v = begin; v < end; v += L1_CACHE_BYTES) in sh2a__flush_wback_region()
97 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2a__flush_purge_region()
98 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2a__flush_purge_region()
99 & ~(L1_CACHE_BYTES-1); in sh2a__flush_purge_region()
104 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh2a__flush_purge_region()
127 begin = (unsigned long)start & ~(L1_CACHE_BYTES in sh2a__flush_invalidate_region()
[all...]
cache-sh2.c
23 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2__flush_wback_region()
24 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2__flush_wback_region()
25 & ~(L1_CACHE_BYTES-1); in sh2__flush_wback_region()
26 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh2__flush_wback_region()
44 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2__flush_purge_region()
45 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2__flush_purge_region()
46 & ~(L1_CACHE_BYTES-1); in sh2__flush_purge_region()
48 for (v = begin; v < end; v+=L1_CACHE_BYTES) in sh2__flush_purge_region()
75 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2__flush_invalidate_region()
76 end = ((unsigned long)start + size + L1_CACHE_BYTES in sh2__flush_invalidate_region()
[all...]
cache-sh3.c
38 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh3__flush_wback_region()
39 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh3__flush_wback_region()
40 & ~(L1_CACHE_BYTES-1); in sh3__flush_wback_region()
42 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh3__flush_wback_region()
76 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh3__flush_purge_region()
77 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh3__flush_purge_region()
78 & ~(L1_CACHE_BYTES-1); in sh3__flush_purge_region()
80 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh3__flush_purge_region()
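The four SH helpers above all share the same rounding idiom: align the start address down and the end address up to a cache-line boundary, then step through the region in L1_CACHE_BYTES increments. A minimal sketch of that idiom in plain C, with a hypothetical flush_one_line() stub standing in for the per-line cache operation (__ocbwb() in the SH-4 version):

/*
 * Sketch only, not kernel code.  L1_CACHE_SHIFT is an assumption here;
 * each architecture sets its own value.
 */
#define L1_CACHE_SHIFT 5
#define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT)

static void flush_one_line(unsigned long v)
{
    (void)v;    /* stub: the real helper issues a per-line cache instruction */
}

static void flush_wback_region(void *start, int size)
{
    unsigned long v, begin, end;

    /* Round start down and end up to whole cache lines. */
    begin = (unsigned long)start & ~(L1_CACHE_BYTES - 1);
    end = ((unsigned long)start + size + L1_CACHE_BYTES - 1)
          & ~(L1_CACHE_BYTES - 1);

    for (v = begin; v < end; v += L1_CACHE_BYTES)
        flush_one_line(v);
}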
/linux/arch/csky/mm/
cachev2.c
26 unsigned long i = start & ~(L1_CACHE_BYTES - 1); in icache_inv_range()
28 for (; i < end; i += L1_CACHE_BYTES) in icache_inv_range()
49 unsigned long i = param->start & ~(L1_CACHE_BYTES - 1); in local_icache_inv_range()
54 for (; i < param->end; i += L1_CACHE_BYTES) in local_icache_inv_range()
81 unsigned long i = start & ~(L1_CACHE_BYTES - 1); in dcache_wb_range()
83 for (; i < end; i += L1_CACHE_BYTES) in dcache_wb_range()
97 unsigned long i = start & ~(L1_CACHE_BYTES - 1); in dma_wbinv_range()
99 for (; i < end; i += L1_CACHE_BYTES) in dma_wbinv_range()
106 unsigned long i = start & ~(L1_CACHE_BYTES - 1); in dma_inv_range()
108 for (; i < end; i += L1_CACHE_BYTES) in dma_inv_range()
[all...]
/linux/arch/microblaze/include/asm/
cache.h
17 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
19 #define SMP_CACHE_BYTES L1_CACHE_BYTES
22 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
24 #define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
/linux/arch/hexagon/include/asm/
cache.h
13 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
15 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
17 #define __cacheline_aligned __aligned(L1_CACHE_BYTES)
18 #define ____cacheline_aligned __aligned(L1_CACHE_BYTES)
/linux/arch/arm/lib/
copy_page.S
14 #define COPY_COUNT (PAGE_SZ / (2 * L1_CACHE_BYTES) PLD( -1 ))
27 PLD( pld [r1, #L1_CACHE_BYTES] )
30 1: PLD( pld [r1, #2 * L1_CACHE_BYTES])
31 PLD( pld [r1, #3 * L1_CACHE_BYTES])
33 .rept (2 * L1_CACHE_BYTES / 16 - 1)
/linux/arch/arc/kernel/
vmlinux.lds.S
62 INIT_TEXT_SECTION(L1_CACHE_BYTES)
67 INIT_SETUP(L1_CACHE_BYTES)
78 PERCPU_SECTION(L1_CACHE_BYTES)
95 EXCEPTION_TABLE(L1_CACHE_BYTES)
105 RW_DATA(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
/linux/arch/csky/kernel/
vmlinux.lds.S
49 PERCPU_SECTION(L1_CACHE_BYTES)
55 RW_DATA(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
106 EXCEPTION_TABLE(L1_CACHE_BYTES)
107 BSS_SECTION(L1_CACHE_BYTES, PAGE_SIZE, L1_CACHE_BYTES)
/linux/arch/powerpc/include/asm/
cache.h
30 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
32 #define SMP_CACHE_BYTES L1_CACHE_BYTES
37 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
89 return L1_CACHE_BYTES; in l1_dcache_bytes()
99 return L1_CACHE_BYTES; in l1_icache_bytes()
page_32.h
44 WARN_ON((unsigned long)addr & (L1_CACHE_BYTES - 1)); in clear_page()
46 for (i = 0; i < PAGE_SIZE / L1_CACHE_BYTES; i++, addr += L1_CACHE_BYTES) in clear_page()
/linux/arch/alpha/include/asm/
cache.h
11 # define L1_CACHE_BYTES 64 macro
17 # define L1_CACHE_BYTES 32 macro
21 #define SMP_CACHE_BYTES L1_CACHE_BYTES
/linux/arch/xtensa/include/asm/
cache.h
17 #define L1_CACHE_BYTES XCHAL_DCACHE_LINESIZE macro
18 #define SMP_CACHE_BYTES L1_CACHE_BYTES
32 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux/drivers/md/dm-vdo/
cpu.h
50 unsigned int offset = ((uintptr_t) address % L1_CACHE_BYTES); in uds_prefetch_range()
51 unsigned int cache_lines = (1 + ((size + offset) / L1_CACHE_BYTES)); in uds_prefetch_range()
55 address += L1_CACHE_BYTES; in uds_prefetch_range()
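The dm-vdo helper above counts how many cache lines the byte range [address, address + size) can touch and prefetches each one. A small sketch of that arithmetic in plain C, assuming a 64-byte line and using GCC's __builtin_prefetch() in place of the kernel's prefetch helper:

#include <stdint.h>

#define L1_CACHE_BYTES 64    /* assumption for the sketch; arch-dependent */

/* Sketch of the uds_prefetch_range() line-count arithmetic shown above. */
static inline void prefetch_range(const void *address, unsigned int size)
{
    /* A misaligned start can push the range into one extra cache line. */
    unsigned int offset = (uintptr_t)address % L1_CACHE_BYTES;
    unsigned int cache_lines = 1 + (size + offset) / L1_CACHE_BYTES;

    while (cache_lines-- > 0) {
        __builtin_prefetch(address);
        address = (const char *)address + L1_CACHE_BYTES;
    }
}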
/linux/arch/nios2/kernel/
vmlinux.lds.S
41 EXCEPTION_TABLE(L1_CACHE_BYTES)
47 PERCPU_SECTION(L1_CACHE_BYTES)
52 RW_DATA(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
/linux/arch/powerpc/lib/
copy_32.S
61 CACHELINE_BYTES = L1_CACHE_BYTES
63 CACHELINE_MASK = (L1_CACHE_BYTES-1)
211 #if L1_CACHE_BYTES >= 32
213 #if L1_CACHE_BYTES >= 64
216 #if L1_CACHE_BYTES >= 128
390 #if L1_CACHE_BYTES >= 32
392 #if L1_CACHE_BYTES >= 64
395 #if L1_CACHE_BYTES >= 128
448 #if L1_CACHE_BYTES >= 32
450 #if L1_CACHE_BYTES >
[all...]
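copy_32.S (and misc_32.S further down) choose how much work to do per cache line at build time with #if tests on L1_CACHE_BYTES. A hedged C rendering of that compile-time pattern; copy_32_bytes() is a made-up stand-in for one unrolled 32-byte block of the assembly loop:

#include <string.h>

#define L1_CACHE_BYTES 128    /* assumption; the architecture sets this */

/* Made-up helper standing in for one unrolled 32-byte copy block. */
static void copy_32_bytes(char *dst, const char *src)
{
    memcpy(dst, src, 32);
}

/* Copy exactly one cache line; the amount of unrolling is fixed at
 * compile time from L1_CACHE_BYTES, mirroring the #if tests above. */
static void copy_cache_line(char *dst, const char *src)
{
#if L1_CACHE_BYTES >= 32
    copy_32_bytes(dst, src);
#endif
#if L1_CACHE_BYTES >= 64
    copy_32_bytes(dst + 32, src + 32);
#endif
#if L1_CACHE_BYTES >= 128
    copy_32_bytes(dst + 64, src + 64);
    copy_32_bytes(dst + 96, src + 96);
#endif
}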
/linux/arch/openrisc/include/asm/
cacheflush.h
45 * Even if the actual block size is larger than L1_CACHE_BYTES, paddr
46 * can be incremented by L1_CACHE_BYTES. When paddr is written to the
52 local_dcache_range_flush(addr, addr + L1_CACHE_BYTES)
54 local_dcache_range_inv(addr, addr + L1_CACHE_BYTES)
56 local_icache_range_inv(addr, addr + L1_CACHE_BYTES)
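The openrisc comment above explains why stepping paddr by L1_CACHE_BYTES is safe even when the hardware block size is larger: the extra operations simply hit a block that was already handled. A minimal sketch of the loop that comment describes, with a hypothetical flush_dcache_line() stub for the per-line operation:

#define PAGE_SIZE      4096    /* assumption for the sketch */
#define L1_CACHE_BYTES 32      /* may be smaller than the real block size */

static void flush_dcache_line(unsigned long paddr)
{
    (void)paddr;    /* stub: the real per-line operation is architecture-specific */
}

/* Flush one page of data cache, one L1_CACHE_BYTES step at a time.
 * If the hardware block is larger, some steps land on an already-flushed
 * block, which is harmless. */
static void flush_dcache_page_range(unsigned long paddr)
{
    unsigned long end = paddr + PAGE_SIZE;

    for (; paddr < end; paddr += L1_CACHE_BYTES)
        flush_dcache_line(paddr);
}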
/linux/arch/m68k/include/asm/
cache.h
10 #define L1_CACHE_BYTES (1<< L1_CACHE_SHIFT) macro
12 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux/tools/include/linux/
cache.h
6 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
8 #define SMP_CACHE_BYTES L1_CACHE_BYTES
/linux/arch/powerpc/kernel/
misc_32.S
170 rlwinm r5, r3, 0, L1_CACHE_BYTES - 1
185 addi r11,r11,L1_CACHE_BYTES
189 li r11,L1_CACHE_BYTES+4
191 li r0,PAGE_SIZE/L1_CACHE_BYTES - MAX_COPY_PREFETCH
199 #if L1_CACHE_BYTES >= 32
201 #if L1_CACHE_BYTES >= 64
204 #if L1_CACHE_BYTES >= 128
/linux/arch/riscv/kernel/
vmlinux-xip.lds.S
52 RO_DATA(L1_CACHE_BYTES)
76 RW_DATA(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
122 PERCPU_SECTION(L1_CACHE_BYTES)
/linux/arch/arm/include/asm/
cache.h
9 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
18 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux/arch/riscv/include/asm/
cache.h
12 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
15 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux/arch/nios2/include/asm/
cache.h
19 #define L1_CACHE_BYTES NIOS2_ICACHE_LINE_SIZE macro
21 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
