Lines matching references to size (x86-64 flushcache copy helpers):

  27  static void clean_cache_range(void *addr, size_t size)
  31          void *vend = addr + size;

  39  void arch_wb_cache_pmem(void *addr, size_t size)
  41          clean_cache_range(addr, size);
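Lines 27-41 are the low-level writeback helper and its thin pmem wrapper. A helper like clean_cache_range() typically rounds the start address down to a cacheline boundary and issues one writeback instruction per line until it walks past addr + size. A hedged kernel-style sketch of that pattern (the fixed 64-byte line size and the use of clwb() are assumptions for illustration, not lifted from this file):

#define CACHELINE_SIZE 64UL     /* assumed line size; real code reads it from CPUID */

static void clean_cache_range_sketch(void *addr, size_t size)
{
        unsigned long mask = CACHELINE_SIZE - 1;
        void *vend = addr + size;       /* first byte past the range to write back */
        void *p;

        /* Round down so a partially covered first line is written back too. */
        for (p = (void *)((unsigned long)addr & ~mask); p < vend; p += CACHELINE_SIZE)
                clwb(p);                /* per-line writeback, e.g. the CLWB instruction */
}

void arch_wb_cache_pmem_sketch(void *addr, size_t size)
{
        clean_cache_range_sketch(addr, size);   /* pmem writeback is just a range clean */
}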
  45  long __copy_user_flushcache(void *dst, const void __user *src, unsigned size)
  48          long rc = __copy_user_nocache(dst, src, size, 0);
  58          if (size < 8) {
  59                  if (!IS_ALIGNED(dest, 4) || size != 4)
  60                          clean_cache_range(dst, size);
  68                  if (size > flushed && !IS_ALIGNED(size - flushed, 8))
  69                          clean_cache_range(dst + size - 1, 1);
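Lines 58-69 are the fixups around the __copy_user_nocache() call: a copy shorter than 8 bytes that is not a single aligned 4-byte store is flushed wholesale (lines 58-60), and for larger copies a tail that is not a multiple of 8 bytes is handled by flushing a 1-byte range starting at the last byte (lines 68-69). That is enough because the range-clean helper rounds down to a cacheline boundary, so one byte covers the whole trailing line. A minimal illustration of that trick (the wrapper name is hypothetical; the callee is the helper listed above):

/*
 * Hypothetical wrapper: write back the cacheline that holds the last byte of
 * a buffer.  clean_cache_range() rounds its start down to a line boundary,
 * so a 1-byte request at the final byte covers the entire trailing line.
 */
static void flush_last_line(void *dst, size_t size)
{
        if (size)
                clean_cache_range(dst + size - 1, 1);
}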
  75  void __memcpy_flushcache(void *_dst, const void *_src, size_t size)
  82                  size_t len = min_t(size_t, size, ALIGN(dest, 8) - dest);
  88                  size -= len;
  89                  if (!size)
  94          while (size >= 32) {
 107                  size -= 32;
 111          while (size >= 8) {
 118                  size -= 8;
 122          while (size >= 4) {
 129                  size -= 4;
 133          if (size) {
 134                  memcpy((void *) dest, (void *) source, size);
 135                  clean_cache_range((void *) dest, size);
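The __memcpy_flushcache() matches trace a head/body/tail split: copy just enough to reach an 8-byte-aligned destination (lines 82-89), drain the bulk in 32-, 8- and 4-byte chunks (lines 94-129), and finish any remainder with a plain memcpy() followed by an explicit writeback (lines 133-135). The real function can skip per-chunk flushes in the aligned loops because it uses non-temporal stores; the self-contained sketch below substitutes ordinary memcpy() plus a hypothetical flush_range() stub, so the names and the flush calls inside the loops are illustrative rather than the kernel's code:

#include <stddef.h>
#include <string.h>

/* Stand-in for a cache writeback primitive (CLWB/CLFLUSHOPT per line in real code). */
static void flush_range(void *addr, size_t size)
{
        (void)addr;
        (void)size;
}

void memcpy_flushcache_sketch(void *_dst, const void *_src, size_t size)
{
        unsigned long dest = (unsigned long)_dst;
        unsigned long source = (unsigned long)_src;

        /* Head: copy only up to the next 8-byte-aligned destination address. */
        if (dest & 7) {
                size_t len = 8 - (dest & 7);

                if (len > size)
                        len = size;
                memcpy((void *)dest, (void *)source, len);
                flush_range((void *)dest, len);
                dest += len;
                source += len;
                size -= len;
                if (!size)
                        return;
        }

        /* Body: consume aligned chunks, largest first, decrementing size as we go. */
        while (size >= 32) {
                memcpy((void *)dest, (void *)source, 32);
                flush_range((void *)dest, 32);
                dest += 32;
                source += 32;
                size -= 32;
        }
        while (size >= 8) {
                memcpy((void *)dest, (void *)source, 8);
                flush_range((void *)dest, 8);
                dest += 8;
                source += 8;
                size -= 8;
        }
        while (size >= 4) {
                memcpy((void *)dest, (void *)source, 4);
                flush_range((void *)dest, 4);
                dest += 4;
                source += 4;
                size -= 4;
        }

        /* Tail: whatever is left is copied normally and written back explicitly. */
        if (size) {
                memcpy((void *)dest, (void *)source, size);
                flush_range((void *)dest, size);
        }
}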