/linux-6.1.9/include/asm-generic/

  xor.h
     56  const unsigned long * __restrict p4) in xor_8regs_4() argument
     61  p1[0] ^= p2[0] ^ p3[0] ^ p4[0]; in xor_8regs_4()
     62  p1[1] ^= p2[1] ^ p3[1] ^ p4[1]; in xor_8regs_4()
     63  p1[2] ^= p2[2] ^ p3[2] ^ p4[2]; in xor_8regs_4()
     64  p1[3] ^= p2[3] ^ p3[3] ^ p4[3]; in xor_8regs_4()
     65  p1[4] ^= p2[4] ^ p3[4] ^ p4[4]; in xor_8regs_4()
     66  p1[5] ^= p2[5] ^ p3[5] ^ p4[5]; in xor_8regs_4()
     67  p1[6] ^= p2[6] ^ p3[6] ^ p4[6]; in xor_8regs_4()
     68  p1[7] ^= p2[7] ^ p3[7] ^ p4[7]; in xor_8regs_4()
     72  p4 += 8; in xor_8regs_4()
     [all …]
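
The matches above are the portable fallback XOR used by the RAID parity code: xor_8regs_4() folds three source buffers into a destination, eight machine words per iteration. The standalone C sketch below mirrors that structure; the function and variable names are illustrative, not the kernel's, and the kernel unrolls the inner loop by hand rather than relying on the compiler.

#include <stdio.h>

/*
 * Minimal userspace sketch of the generic 4-operand XOR above:
 * p1[i] ^= p2[i] ^ p3[i] ^ p4[i], processed in blocks of 8 longs.
 * The restrict qualifiers promise the compiler the buffers do not
 * overlap, which is what lets it keep values in registers.
 * bytes must be a multiple of 8 * sizeof(unsigned long).
 */
static void xor_4ops(unsigned long bytes,
                     unsigned long *restrict p1,
                     const unsigned long *restrict p2,
                     const unsigned long *restrict p3,
                     const unsigned long *restrict p4)
{
        long lines = bytes / (sizeof(unsigned long) * 8);

        do {
                for (int i = 0; i < 8; i++)
                        p1[i] ^= p2[i] ^ p3[i] ^ p4[i];
                p1 += 8;
                p2 += 8;
                p3 += 8;
                p4 += 8;
        } while (--lines > 0);
}

int main(void)
{
        unsigned long d[8] = { 1 }, a[8] = { 2 }, b[8] = { 4 }, c[8] = { 8 };

        xor_4ops(sizeof(d), d, a, b, c);
        printf("%lu\n", d[0]);  /* 1 ^ 2 ^ 4 ^ 8 = 15 */
        return 0;
}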

/linux-6.1.9/arch/x86/include/asm/

  xor_32.h
    121  const unsigned long * __restrict p4) in xor_pII_mmx_4() argument
    166  "+r" (p1), "+r" (p2), "+r" (p3), "+r" (p4) in xor_pII_mmx_4()
    178  const unsigned long * __restrict p4, in xor_pII_mmx_5() argument
    191  asm("" : "+r" (p4), "+r" (p5)); in xor_pII_mmx_5()
    238  : "r" (p4), "r" (p5) in xor_pII_mmx_5()
    244  asm("" : "=r" (p4), "=r" (p5)); in xor_pII_mmx_5()
    367  const unsigned long * __restrict p4) in xor_p5_mmx_4() argument
    424  "+r" (p1), "+r" (p2), "+r" (p3), "+r" (p4) in xor_p5_mmx_4()
    435  const unsigned long * __restrict p4, in xor_p5_mmx_5() argument
    448  asm("" : "+r" (p4), "+r" (p5)); in xor_p5_mmx_5()
    [all …]

  xor.h
    248  const unsigned long * __restrict p4) in xor_sse_4() argument
    305  [p2] "+r" (p2), [p3] "+r" (p3), [p4] "+r" (p4) in xor_sse_4()
    316  const unsigned long * __restrict p4) in xor_sse_4_pf64() argument
    346  [p2] "+r" (p2), [p3] "+r" (p3), [p4] "+r" (p4) in xor_sse_4_pf64()
    357  const unsigned long * __restrict p4, in xor_sse_5() argument
    422  [p3] "+r" (p3), [p4] "+r" (p4), [p5] "+r" (p5) in xor_sse_5()
    433  const unsigned long * __restrict p4, in xor_sse_5_pf64() argument
    466  [p3] "+r" (p3), [p4] "+r" (p4), [p5] "+r" (p5) in xor_sse_5_pf64()
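
The *_pf64 variants listed alongside the plain xor_sse_* routines add software prefetching of data ahead of the block currently being XORed. The portable C sketch below only illustrates that idea; the loop shape, prefetch distance, and names are assumptions, not the exact tuning of xor_sse_4_pf64().

#include <stddef.h>

/*
 * Sketch of software prefetching in a 4-operand XOR loop: request the
 * next 64-byte cache line of every buffer before XORing the current
 * one, so the loads overlap with computation.  __builtin_prefetch is
 * the GCC/Clang builtin; the real xor_sse_4_pf64() works on XMM
 * registers with explicit prefetch instructions in inline assembly.
 * bytes must be a multiple of 64.
 */
#define LINE_WORDS (64 / sizeof(unsigned long))

void xor_4ops_pf(unsigned long bytes,
                 unsigned long *restrict p1,
                 const unsigned long *restrict p2,
                 const unsigned long *restrict p3,
                 const unsigned long *restrict p4)
{
        long lines = bytes / 64;

        do {
                __builtin_prefetch(p1 + LINE_WORDS);
                __builtin_prefetch(p2 + LINE_WORDS);
                __builtin_prefetch(p3 + LINE_WORDS);
                __builtin_prefetch(p4 + LINE_WORDS);
                for (size_t i = 0; i < LINE_WORDS; i++)
                        p1[i] ^= p2[i] ^ p3[i] ^ p4[i];
                p1 += LINE_WORDS;
                p2 += LINE_WORDS;
                p3 += LINE_WORDS;
                p4 += LINE_WORDS;
        } while (--lines > 0);
}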

  xor_avx.h
    126  const unsigned long * __restrict p4) in xor_avx_5() argument
    136  asm volatile("vmovdqa %0, %%ymm" #reg : : "m" (p4[i / sizeof(*p4)])); \ in xor_avx_5()
    155  p4 = (unsigned long *)((uintptr_t)p4 + 512); in xor_avx_5()

  kvm_para.h
     94  unsigned long p4) in kvm_hypercall4() argument
     99  return tdx_kvm_hypercall(nr, p1, p2, p3, p4); in kvm_hypercall4()
    103  : "a"(nr), "b"(p1), "c"(p2), "d"(p3), "S"(p4) in kvm_hypercall4()
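
The kvm_para.h matches show the register convention of a four-argument KVM hypercall: the number in RAX, the parameters in RBX/RCX/RDX/RSI, the result back in RAX, with TDX guests diverted to tdx_kvm_hypercall() instead. The minimal sketch below shows only that calling shape; hard-coding "vmcall" is an assumption for illustration (the real kvm_hypercall4() patches between VMCALL and VMMCALL via alternatives), and the call is only meaningful inside a KVM guest.

/*
 * Sketch of the kvm_hypercall4() register convention visible above.
 * Hard-codes VMCALL (Intel); the kernel selects VMCALL vs. VMMCALL at
 * runtime, and TDX guests never reach this path.
 */
static inline long kvm_hypercall4_sketch(unsigned int nr,
                                         unsigned long p1, unsigned long p2,
                                         unsigned long p3, unsigned long p4)
{
        long ret;

        asm volatile("vmcall"
                     : "=a" (ret)                               /* result in RAX */
                     : "a" (nr),                                /* hypercall number */
                       "b" (p1), "c" (p2), "d" (p3), "S" (p4)   /* arguments */
                     : "memory");
        return ret;
}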

  tdx.h
     81  unsigned long p3, unsigned long p4);
     85  unsigned long p4) in tdx_kvm_hypercall() argument

/linux-6.1.9/arch/arm/include/asm/

  xor.h
     94  const unsigned long * __restrict p4) in xor_arm4regs_4() argument
    106  XOR_BLOCK_2(p4); in xor_arm4regs_4()
    115  const unsigned long * __restrict p4, in xor_arm4regs_5() argument
    128  XOR_BLOCK_2(p4); in xor_arm4regs_5()
    186  const unsigned long * __restrict p4) in xor_neon_4() argument
    189  xor_arm4regs_4(bytes, p1, p2, p3, p4); in xor_neon_4()
    192  xor_block_neon_inner.do_4(bytes, p1, p2, p3, p4); in xor_neon_4()
    201  const unsigned long * __restrict p4, in xor_neon_5() argument
    205  xor_arm4regs_5(bytes, p1, p2, p3, p4, p5); in xor_neon_5()
    208  xor_block_neon_inner.do_5(bytes, p1, p2, p3, p4, p5); in xor_neon_5()

/linux-6.1.9/arch/sparc/include/asm/

  xor_32.h
    109  const unsigned long * __restrict p4) in sparc_4() argument
    160  : "r" (p1), "r" (p2), "r" (p3), "r" (p4) in sparc_4()
    167  p4 += 8; in sparc_4()
    175  const unsigned long * __restrict p4, in sparc_5() argument
    239  : "r" (p1), "r" (p2), "r" (p3), "r" (p4), "r" (p5) in sparc_5()
    246  p4 += 8; in sparc_5()

  xor_64.h
     23  const unsigned long * __restrict p4);
     27  const unsigned long * __restrict p4,
     48  const unsigned long * __restrict p4);
     52  const unsigned long * __restrict p4,

/linux-6.1.9/arch/s390/lib/

  xor.c
     66  const unsigned long * __restrict p4) in xor_xc_4() argument
     91  : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4) in xor_xc_4()
     98  const unsigned long * __restrict p4, in xor_xc_5() argument
    128  : "+d" (bytes), "+a" (p1), "+a" (p2), "+a" (p3), "+a" (p4), in xor_xc_5()

/linux-6.1.9/tools/testing/selftests/ftrace/test.d/instances/

  instance.tc
     51  p4=$!
     52  echo $p4
     64  kill -1 $p4
     68  wait $p1 $p2 $p3 $p4 $p5

  instance-event.tc
    111  p4=$!
    112  echo $p4
    124  kill -1 $p4
    128  wait $p1 $p2 $p3 $p4 $p5

/linux-6.1.9/arch/powerpc/lib/

  xor_vmx_glue.c
     41  const unsigned long * __restrict p4) in xor_altivec_4() argument
     45  __xor_altivec_4(bytes, p1, p2, p3, p4); in xor_altivec_4()
     54  const unsigned long * __restrict p4, in xor_altivec_5() argument
     59  __xor_altivec_5(bytes, p1, p2, p3, p4, p5); in xor_altivec_5()

  xor_vmx.h
     17  const unsigned long * __restrict p4);
     21  const unsigned long * __restrict p4,

/linux-6.1.9/arch/arm64/include/asm/

  xor.h
     41  const unsigned long * __restrict p4) in xor_neon_4() argument
     44  xor_block_inner_neon.do_4(bytes, p1, p2, p3, p4); in xor_neon_4()
     52  const unsigned long * __restrict p4, in xor_neon_5() argument
     56  xor_block_inner_neon.do_5(bytes, p1, p2, p3, p4, p5); in xor_neon_5()

/linux-6.1.9/arch/alpha/include/asm/

  xor.h
     19  const unsigned long * __restrict p4);
     24  const unsigned long * __restrict p4,
     38  const unsigned long * __restrict p4);
     43  const unsigned long * __restrict p4,

/linux-6.1.9/drivers/misc/cxl/

  hcalls.c
    206  u64 p1, u64 p2, u64 p3, u64 p4, u64 *out) in cxl_h_control_function() argument
    211  CXL_H9_WAIT_UNTIL_DONE(rc, retbuf, H_CONTROL_CA_FUNCTION, unit_address, op, p1, p2, p3, p4); in cxl_h_control_function()
    213  unit_address, OP_STR_AFU(op), p1, p2, p3, p4, retbuf[0], rc); in cxl_h_control_function()
    214  trace_cxl_hcall_control_function(unit_address, OP_STR_AFU(op), p1, p2, p3, p4, retbuf[0], rc); in cxl_h_control_function()
    479  u64 p1, u64 p2, u64 p3, u64 p4, u64 *out) in cxl_h_control_facility() argument
    484  CXL_H9_WAIT_UNTIL_DONE(rc, retbuf, H_CONTROL_CA_FACILITY, unit_address, op, p1, p2, p3, p4); in cxl_h_control_facility()
    486  unit_address, OP_STR_CONTROL_ADAPTER(op), p1, p2, p3, p4, retbuf[0], rc); in cxl_h_control_facility()
    487  …trace_cxl_hcall_control_facility(unit_address, OP_STR_CONTROL_ADAPTER(op), p1, p2, p3, p4, retbuf[… in cxl_h_control_facility()

  trace.h
    518  u64 p4, unsigned long r4, long rc),
    520  TP_ARGS(unit_address, fct, p1, p2, p3, p4, r4, rc),
    528  __field(u64, p4)
    539  __entry->p4 = p4;
    550  __entry->p4,
    599  u64 p4, unsigned long r4, long rc),
    600  TP_ARGS(unit_address, fct, p1, p2, p3, p4, r4, rc)
    647  u64 p4, unsigned long r4, long rc),
    648  TP_ARGS(unit_address, fct, p1, p2, p3, p4, r4, rc)

/linux-6.1.9/arch/arm64/lib/

  xor-neon.c
     79  const unsigned long * __restrict p4) in xor_arm64_neon_4() argument
     84  uint64_t *dp4 = (uint64_t *)p4; in xor_arm64_neon_4()
    124  const unsigned long * __restrict p4, in xor_arm64_neon_5() argument
    130  uint64_t *dp4 = (uint64_t *)p4; in xor_arm64_neon_5()
    233  const unsigned long * __restrict p4) in xor_arm64_eor3_4() argument
    238  uint64_t *dp4 = (uint64_t *)p4; in xor_arm64_eor3_4()
    277  const unsigned long * __restrict p4, in xor_arm64_eor3_5() argument
    283  uint64_t *dp4 = (uint64_t *)p4; in xor_arm64_eor3_5()
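
xor-neon.c provides both plain NEON routines and xor_arm64_eor3_* variants that use EOR3, the three-way XOR added by the ARMv8.2 SHA3 extension, reducing the number of XOR instructions per block. The intrinsics sketch below only illustrates that idea; the function name and loop shape are assumptions, and it needs a SHA3-capable target (e.g. -march=armv8.2-a+sha3).

#include <arm_neon.h>
#include <stddef.h>

/*
 * Sketch of the EOR3 idea: combine the three source vectors with a
 * single veor3q_u64(), then fold the result into the destination, so
 * each 16-byte chunk needs two XOR instructions instead of three.
 * bytes must be a multiple of 16.
 */
void xor_4ops_eor3(size_t bytes, unsigned long *p1,
                   const unsigned long *p2, const unsigned long *p3,
                   const unsigned long *p4)
{
        for (size_t i = 0; i < bytes; i += 16) {
                uint64x2_t d  = vld1q_u64((const uint64_t *)((const char *)p1 + i));
                uint64x2_t s2 = vld1q_u64((const uint64_t *)((const char *)p2 + i));
                uint64x2_t s3 = vld1q_u64((const uint64_t *)((const char *)p3 + i));
                uint64x2_t s4 = vld1q_u64((const uint64_t *)((const char *)p4 + i));

                /* EOR3: s2 ^ s3 ^ s4 in one instruction, then fold into dest */
                d = veorq_u64(d, veor3q_u64(s2, s3, s4));
                vst1q_u64((uint64_t *)((char *)p1 + i), d);
        }
}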

/linux-6.1.9/arch/powerpc/include/asm/

  xor_altivec.h
     14  const unsigned long * __restrict p4);
     18  const unsigned long * __restrict p4,

/linux-6.1.9/arch/ia64/include/asm/

  xor.h
     15  const unsigned long * __restrict p4);
     19  const unsigned long * __restrict p4,

/linux-6.1.9/crypto/

  xor.c
     29  unsigned long *p1, *p2, *p3, *p4; in xor_blocks() local
     49  p4 = (unsigned long *) srcs[3]; in xor_blocks()
     50  active_template->do_5(bytes, dest, p1, p2, p3, p4); in xor_blocks()
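
crypto/xor.c is where p4 enters the generic API: xor_blocks() unpacks up to four source buffers from srcs[] and calls the selected template's do_2 .. do_5 routine, so do_5(bytes, dest, p1, p2, p3, p4) is the destination-plus-four-sources case. The userspace sketch below mirrors that dispatch; the struct-free layout and names are illustrative, not the kernel's xor_block_template machinery.

#include <stdio.h>

/* Illustrative stand-ins for two of the template's routines. */
static void do_2(unsigned long bytes, unsigned long *dest,
                 const unsigned long *p1)
{
        for (unsigned long i = 0; i < bytes / sizeof(*dest); i++)
                dest[i] ^= p1[i];
}

static void do_5(unsigned long bytes, unsigned long *dest,
                 const unsigned long *p1, const unsigned long *p2,
                 const unsigned long *p3, const unsigned long *p4)
{
        for (unsigned long i = 0; i < bytes / sizeof(*dest); i++)
                dest[i] ^= p1[i] ^ p2[i] ^ p3[i] ^ p4[i];
}

/* Dispatch by source count, mirroring the shape of xor_blocks(). */
static void xor_blocks_sketch(unsigned int src_count, unsigned long bytes,
                              unsigned long *dest, unsigned long **srcs)
{
        if (src_count == 1)
                do_2(bytes, dest, srcs[0]);
        else if (src_count == 4)
                do_5(bytes, dest, srcs[0], srcs[1], srcs[2], srcs[3]);
        /* the kernel also covers 2 and 3 sources via do_3()/do_4() */
}

int main(void)
{
        unsigned long d[4] = { 0 }, a[4] = { 1 }, b[4] = { 2 },
                      c[4] = { 4 }, e[4] = { 8 };
        unsigned long *srcs[] = { a, b, c, e };

        xor_blocks_sketch(4, sizeof(d), d, srcs);
        printf("%lu\n", d[0]);  /* 1 ^ 2 ^ 4 ^ 8 = 15 */
        return 0;
}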

/linux-6.1.9/arch/ia64/lib/

  memcpy.S
    212  (p4) st2 [dst]=t0,2
    213  (p4) shr.u t0=t0,16
    214  (p4) adds cnt=2,cnt
    266  (p4) st2 [dst]=t0,2
    267  (p4) shr.u t0=t0,16

/linux-6.1.9/arch/powerpc/mm/

  hugetlbpage.c
    114  p4d_t *p4; in huge_pte_alloc() local
    124  p4 = p4d_offset(pg, addr); in huge_pte_alloc()
    129  return (pte_t *) p4; in huge_pte_alloc()
    135  hpdp = (hugepd_t *)p4; in huge_pte_alloc()
    138  pu = pud_alloc(mm, p4, addr); in huge_pte_alloc()
    163  hpdp = (hugepd_t *)p4; in huge_pte_alloc()
    166  pu = pud_alloc(mm, p4, addr); in huge_pte_alloc()
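
Unlike the XOR hits, the p4 in huge_pte_alloc() is a p4d_t pointer, the fourth of the kernel's five page-table levels. The fragment below is only a structural sketch of the generic pgd -> p4d -> pud -> pmd -> pte descent using real kernel helpers; it is not the powerpc hugepage logic, it ignores hugepd handling, and it builds only inside the kernel.

#include <linux/mm.h>

/*
 * Structural sketch: locate/allocate the page-table entry for addr by
 * walking all five levels.  Error handling is reduced to NULL checks;
 * huge_pte_alloc() instead stops at the level matching the huge page
 * size and may hand back the p4d/pud/pmd slot itself.
 */
static pte_t *walk_sketch(struct mm_struct *mm, unsigned long addr)
{
        pgd_t *pgd = pgd_offset(mm, addr);
        p4d_t *p4d = p4d_offset(pgd, addr);
        pud_t *pud = pud_alloc(mm, p4d, addr);
        pmd_t *pmd = pud ? pmd_alloc(mm, pud, addr) : NULL;

        return pmd ? pte_alloc_map(mm, pmd, addr) : NULL;
}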

/linux-6.1.9/tools/testing/selftests/net/

  fib-onlink-tests.sh
     16  NETIFS[p4]=veth4
     28  V4ADDRS[p4]=169.254.3.2
     39  V6ADDRS[p4]=2001:db8:301::2
    190  ip li add ${NETIFS[p3]} type veth peer name ${NETIFS[p4]}