/linux-6.1.9/arch/xtensa/lib/ |
D | memcopy.S |
     71  add a7, a3, a4 # a7 = end address for source
     74  l8ui a6, a3, 0
     75  addi a3, a3, 1
     79  bne a3, a7, .Lnextbyte # continue loop if $a3:src != $a7:src_end
     93  l8ui a6, a3, 0
     94  addi a3, a3, 1
    103  l8ui a6, a3, 0
    104  l8ui a7, a3, 1
    105  addi a3, a3, 2
    116  # a2/ dst, a3/ src, a4/ len
    [all …]
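The memcopy.S hits show a3 used as the source pointer in the byte-at-a-time copy paths, with a7 precomputed as the source end address (line 71) so the loop terminates on a pointer comparison instead of a separate counter. A minimal C sketch of that loop shape (names are illustrative, not taken from the kernel):

    #include <stddef.h>

    /* Byte-at-a-time copy: advance src until it reaches the precomputed end. */
    static void copy_bytes(unsigned char *dst, const unsigned char *src, size_t len)
    {
        const unsigned char *src_end = src + len;   /* add a7, a3, a4 */

        while (src != src_end) {                    /* bne a3, a7, .Lnextbyte */
            *dst++ = *src++;                        /* l8ui + store, pointer bumps */
        }
    }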
|
D | usercopy.S |
     69  # a2/ dst, a3/ src, a4/ len
     79  bnone a3, a8, .Laligned # then use word copy
     80  __ssa8 a3 # set shift amount from byte offset
     93  EX(10f) l8ui a6, a3, 0
     94  addi a3, a3, 1
    103  EX(10f) l8ui a6, a3, 0
    104  EX(10f) l8ui a7, a3, 1
    105  addi a3, a3, 2
    123  add a7, a3, a4 # a7 = end address for source
    126  EX(10f) l8ui a6, a3, 0
    [all …]
|
D | mulsi3.S |
     38  mull a2, a2, a3
     41  or a4, a2, a3
     44  mul16u a2, a2, a3
     48  srai a5, a3, 16
     49  mul16u a7, a4, a3
     51  mul16u a4, a2, a3
     57  mul.aa.hl a2, a3
     58  mula.aa.lh a2, a3
     60  umul.aa.ll a2, a3
     72  xor a5, a2, a3 /* Top bit is 1 if one input is negative. */
    [all …]
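mulsi3.S builds __mulsi3 for cores without a full 32x32 multiply: depending on the configured options it uses mull directly, the MAC16 unit (mul.aa.*), or composes the product from 16-bit halves with mul16u (the srai by 16 at line 48 extracts a high half). A hedged C sketch of that decomposition, showing only the identity it relies on:

    #include <stdint.h>

    /* 32x32 -> 32-bit multiply from 16x16 -> 32-bit partial products.
     * Only the low 32 bits are kept, so high*high can be dropped.
     */
    static uint32_t mulsi3_sketch(uint32_t a, uint32_t b)
    {
        uint32_t al = a & 0xffff, ah = a >> 16;
        uint32_t bl = b & 0xffff, bh = b >> 16;

        return al * bl + ((al * bh + ah * bl) << 16);
    }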
|
D | divsi3.S |
     10  quos a2, a2, a3
     12  xor a7, a2, a3 /* sign = dividend ^ divisor */
     14  do_abs a3, a3, a4 /* udivisor = abs (divisor) */
     15  bltui a3, 2, .Lle_one /* check if udivisor <= 1 */
     17  do_nsau a4, a3, a2, a8 /* udivisor_shift = nsau (udivisor) */
     22  sll a3, a3 /* udivisor <<= count */
     30  bltu a6, a3, .Lzerobit
     31  sub a6, a6, a3
     35  srli a3, a3, 1
     42  bltu a6, a3, .Lreturn
    [all …]
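divsi3.S is the signed-division helper: when the core has no divide instruction (the quos at line 10 is the hardware case), it records the result sign with an XOR of the operands (line 12), divides absolute values with a normalize-then-shift-subtract loop (nsau counts leading zeros so the divisor can be lined up with the dividend), and applies the sign at the end. A rough C equivalent of that algorithm, using __builtin_clz in place of nsau; this is a sketch, not the kernel's code:

    #include <stdint.h>

    static int32_t divsi3_sketch(int32_t dividend, int32_t divisor)
    {
        int      negative = (dividend ^ divisor) < 0;        /* xor a7, a2, a3 */
        uint32_t n = dividend < 0 ? -(uint32_t)dividend : (uint32_t)dividend;
        uint32_t d = divisor  < 0 ? -(uint32_t)divisor  : (uint32_t)divisor;
        uint32_t q = 0;
        int      shift;

        if (d == 0)
            return 0;                       /* the real helper has an error path */

        /* Normalize: line the divisor's top bit up with the dividend's. */
        shift = __builtin_clz(d) - __builtin_clz(n | 1);
        if (shift < 0)
            shift = 0;
        d <<= shift;                        /* sll a3, a3 */

        /* One quotient bit per iteration: compare, subtract, shift. */
        for (; shift >= 0; shift--) {
            q <<= 1;
            if (n >= d) {                   /* bltu a6, a3, .Lzerobit */
                n -= d;                     /* sub a6, a6, a3 */
                q |= 1;
            }
            d >>= 1;                        /* srli a3, a3, 1 */
        }

        return (int32_t)(negative ? 0u - q : q);
    }

modsi3.S, umodsi3.S and udivsi3.S below use the same normalize-and-subtract loop, keeping either the remainder or the quotient.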
|
D | modsi3.S |
     10  rems a2, a2, a3
     14  do_abs a3, a3, a4 /* udivisor = abs (divisor) */
     15  bltui a3, 2, .Lle_one /* check if udivisor <= 1 */
     17  do_nsau a4, a3, a6, a8 /* udivisor_shift = nsau (udivisor) */
     22  sll a3, a3 /* udivisor <<= count */
     29  bltu a2, a3, .Lzerobit
     30  sub a2, a2, a3
     32  srli a3, a3, 1
     40  bltu a2, a3, .Lreturn
     41  sub a2, a2, a3 /* subtract again if udividend >= udivisor */
    [all …]
|
D | umodsi3.S |
     10  remu a2, a2, a3
     12  bltui a3, 2, .Lle_one /* check if the divisor is <= 1 */
     15  do_nsau a4, a3, a6, a7 /* divisor_shift = nsau (divisor) */
     20  sll a3, a3 /* divisor <<= count */
     27  bltu a2, a3, .Lzerobit
     28  sub a2, a2, a3
     30  srli a3, a3, 1
     38  bltu a2, a3, .Lreturn
     39  sub a2, a2, a3 /* subtract once more if dividend >= divisor */
     44  bnez a3, .Lreturn0
|
D | memset.S |
     38  # a2/ dst, a3/ c, a4/ length
     39  extui a3, a3, 0, 8 # mask to just 8 bits
     40  slli a7, a3, 8 # duplicate character in all bytes of word
     41  or a3, a3, a7 # ...
     42  slli a7, a3, 16 # ...
     43  or a3, a3, a7 # ...
     68  EX(10f) s32i a3, a5, 0
     69  EX(10f) s32i a3, a5, 4
     70  EX(10f) s32i a3, a5, 8
     71  EX(10f) s32i a3, a5, 12
    [all …]
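memset.S first widens the fill byte into a full word (lines 39-43: mask to 8 bits, then OR in copies shifted left by 8 and 16) so the unrolled inner loop can issue 32-bit stores. The same replication trick in C, as a small sketch:

    #include <stdint.h>

    /* Replicate a fill byte into all four bytes of a 32-bit word. */
    static uint32_t spread_byte(uint8_t c)
    {
        uint32_t w = c;      /* extui a3, a3, 0, 8 */

        w |= w << 8;         /* 0x000000cc -> 0x0000cccc */
        w |= w << 16;        /* 0x0000cccc -> 0xcccccccc */
        return w;            /* then stored word-at-a-time with s32i */
    }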
|
D | checksum.S |
     51  srli a5, a3, 5 /* 32-byte chunks */
     81  extui a5, a3, 2, 3 /* remaining 4-byte chunks */
     97  _bbci.l a3, 1, 5f /* remaining 2-byte chunk */
    102  _bbci.l a3, 0, 7f /* remaining 1-byte chunk */
    114  beqz a3, 7b /* branch if len == 0 */
    115  beqi a3, 1, 6b /* branch if len == 1 */
    123  addi a3, a3, -2 /* adjust len */
    131  srli a5, a3, 2 /* 4-byte chunks */
    157  _bbci.l a3, 1, 3f /* remaining 2-byte chunk, still odd addr */
    195  or a10, a2, a3
    [all …]
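checksum.S computes the Internet (one's-complement) checksum: the length is peeled into 32-byte and 4-byte chunks (lines 51 and 81), then 2-byte and 1-byte leftovers are handled separately. A simplified C sketch of the accumulation, without the unrolling or the alignment handling, and treating the data as 16-bit big-endian words for clarity:

    #include <stddef.h>
    #include <stdint.h>

    /* One's-complement sum, 16 bits at a time; carries folded back at the end. */
    static uint16_t csum_sketch(const uint8_t *buf, size_t len)
    {
        uint32_t sum = 0;

        while (len >= 2) {                       /* real code: 32B/4B chunks */
            sum += ((uint32_t)buf[0] << 8) | buf[1];
            buf += 2;
            len -= 2;
        }
        if (len)                                 /* remaining 1-byte chunk */
            sum += (uint32_t)buf[0] << 8;

        while (sum >> 16)                        /* fold carries into 16 bits */
            sum = (sum & 0xffff) + (sum >> 16);

        return (uint16_t)sum;                    /* caller inverts for the wire */
    }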
|
D | udivsi3.S |
     10  quou a2, a2, a3
     12  bltui a3, 2, .Lle_one /* check if the divisor <= 1 */
     16  do_nsau a4, a3, a2, a7 /* divisor_shift = nsau (divisor) */
     21  sll a3, a3 /* divisor <<= count */
     29  bltu a6, a3, .Lzerobit
     30  sub a6, a6, a3
     34  srli a3, a3, 1
     41  bltu a6, a3, .Lreturn
     47  beqz a3, .Lerror /* if divisor == 1, return the dividend */
     52  bltu a6, a3, .Lreturn0
|
D | strncpy_user.S |
     39  # a3/ src
     53  # a2/ dst, a3/ src, a4/ len
     60  bbsi.l a3, 0, .Lsrc1mod2 # if only 8-bit aligned
     61  bbsi.l a3, 1, .Lsrc2mod4 # if only 16-bit aligned
     69  EX(11f) l8ui a9, a3, 0 # get byte 0
     70  addi a3, a3, 1 # advance src pointer
     76  bbci.l a3, 1, .Lsrcaligned # if src is now word-aligned
     79  EX(11f) l8ui a9, a3, 0 # get byte 0
     86  EX(11f) l8ui a9, a3, 1 # get byte 0
     87  addi a3, a3, 2 # advance src pointer
    [all …]
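strncpy_user.S tests the low bits of the source pointer (lines 60-61) and copies one or two leading bytes so the main loop can run word-aligned; the EX(11f) annotations attach exception-fixup entries to every user-space access. A hedged sketch of just the alignment peeling, ignoring the fault handling and the terminating-NUL checks the real routine performs:

    #include <stddef.h>
    #include <stdint.h>

    /* Copy leading bytes until src is 4-byte aligned; returns bytes consumed. */
    static size_t peel_to_alignment(char *dst, const char *src)
    {
        size_t n = 0;

        if ((uintptr_t)src & 1) {              /* bbsi.l a3, 0: only 8-bit aligned */
            dst[n] = src[n];
            n++;
        }
        if (((uintptr_t)src + n) & 2) {        /* bbsi.l a3, 1: only 16-bit aligned */
            dst[n] = src[n];
            n++;
            dst[n] = src[n];
            n++;
        }
        return n;                              /* src + n is now word-aligned */
    }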
|
/linux-6.1.9/arch/xtensa/mm/ |
D | misc.S |
     35  movi a3, 0
     37  s32i a3, a2, 0
     38  s32i a3, a2, 4
     39  s32i a3, a2, 8
     40  s32i a3, a2, 12
     41  s32i a3, a2, 16
     42  s32i a3, a2, 20
     43  s32i a3, a2, 24
     44  s32i a3, a2, 28
     64  l32i a8, a3, 0
    [all …]
|
/linux-6.1.9/arch/xtensa/kernel/ |
D | entry.S |
    129  s32i a3, a2, PT_AREG3
    139  rsr a3, sar
    141  s32i a3, a1, PT_SAR
    154  rsr a3, windowstart
    157  s32i a3, a1, PT_WINDOWSTART
    158  slli a2, a3, 32-WSBITS
    159  src a2, a3, a2
    164  movi a3, 1
    166  s32i a3, a1, PT_WINDOWSTART
    167  s32i a3, a1, PT_WMASK
    [all …]
|
D | align.S |
    175  s32i a3, a2, PT_AREG3
    177  rsr a3, excsave1
    179  s32i a4, a3, EXC_TABLE_FIXUP
    204  movi a3, ~3
    205  and a3, a3, a7 # mask lower bits
    207  l32i a4, a3, 0 # load 2 words
    208  l32i a5, a3, 4
    234  .Lload: movi a3, ~3
    235  and a3, a3, a8 # align memory address
    239  addi a3, a3, 8
    [all …]
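align.S is the unaligned-access exception handler: it masks the faulting address down to a word boundary (movi a3, ~3 then and) and loads the two adjacent aligned words (lines 207-208), from which the unaligned value is assembled with shifts. A hedged C illustration of that idea, assuming little-endian byte order; the real handler also decodes the faulting instruction and uses SAR-based funnel shifts:

    #include <stdint.h>

    /* Read a possibly unaligned 32-bit value using only aligned loads. */
    static uint32_t load_unaligned(uintptr_t addr)
    {
        const uint32_t *p = (const uint32_t *)(addr & ~(uintptr_t)3); /* and with ~3 */
        uint32_t lo = p[0];                         /* l32i a4, a3, 0 */
        uint32_t hi = p[1];                         /* l32i a5, a3, 4 */
        unsigned off = (addr & 3) * 8;

        if (off == 0)
            return lo;
        return (lo >> off) | (hi << (32 - off));    /* little-endian merge */
    }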
|
D | vectors.S |
     74  xsr a3, excsave1 # save a3 and get dispatch table
     76  l32i a2, a3, EXC_TABLE_KSTK # load kernel stack to a2
     80  addx4 a0, a0, a3 # find entry in table
     82  xsr a3, excsave1 # restore a3 and dispatch table
    101  xsr a3, excsave1 # save a3, and get dispatch table
    107  addx4 a0, a0, a3 # find entry in table
    109  xsr a3, excsave1 # restore a3 and dispatch table
    212  xsr a3, excsave1
    213  s32i a2, a3, EXC_TABLE_DOUBLE_SAVE
    237  l32i a2, a3, EXC_TABLE_KSTK
    [all …]
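vectors.S swaps a3 with EXCSAVE1 to get the per-CPU dispatch table, then indexes it by exception cause: addx4 a0, a0, a3 computes table + cause * 4, the slot is loaded, and control jumps there. In C terms the dispatch is roughly the following (names are hypothetical):

    struct pt_regs;
    typedef void (*exc_handler_t)(struct pt_regs *regs);

    /* addx4 a0, a0, a3: entry address = table base + cause * sizeof(pointer) */
    static void dispatch_exception(exc_handler_t *exc_table, unsigned int cause,
                                   struct pt_regs *regs)
    {
        exc_table[cause](regs);
    }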
|
D | head.S |
     90  movi a3, XCHAL_KSEG_PADDR
     91  bltu a2, a3, 1f
     92  sub a2, a2, a3
     93  movi a3, XCHAL_KSEG_SIZE
     94  bgeu a2, a3, 1f
     95  movi a3, XCHAL_KSEG_CACHED_VADDR
     96  add a2, a2, a3
    174  ___unlock_dcache_all a2 a3
    178  ___unlock_icache_all a2 a3
    181  ___invalidate_dcache_all a2 a3
    [all …]
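Lines 90-96 of head.S translate the physical load address into its cached KSEG virtual alias: subtract XCHAL_KSEG_PADDR, check the offset against XCHAL_KSEG_SIZE, and add XCHAL_KSEG_CACHED_VADDR if it is inside the window. A direct C restatement of that check; the constants below are example values, the real ones come from the core's variant headers:

    #include <stdint.h>

    #define XCHAL_KSEG_PADDR        0x00000000u     /* example values only */
    #define XCHAL_KSEG_SIZE         0x08000000u
    #define XCHAL_KSEG_CACHED_VADDR 0xd0000000u

    /* Map a physical address into the cached KSEG window when it falls inside. */
    static uintptr_t kseg_phys_to_virt(uintptr_t pa)
    {
        if (pa < XCHAL_KSEG_PADDR)                          /* bltu a2, a3, 1f */
            return pa;
        if (pa - XCHAL_KSEG_PADDR >= XCHAL_KSEG_SIZE)       /* bgeu a2, a3, 1f */
            return pa;
        return (pa - XCHAL_KSEG_PADDR) + XCHAL_KSEG_CACHED_VADDR; /* add a2, a2, a3 */
    }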
|
D | coprocessor.S |
     52  xchal_cp##x##_store a2 a3 a4 a5 a6; \
     60  xchal_cp##x##_load a2 a3 a4 a5 a6; \
    124  s32i a3, a2, PT_AREG3
    133  l32i a3, a0, THREAD_CPENABLE
    134  beqz a3, .Lload_local
    141  l32i a3, a0, THREAD_CPU
    143  beq a0, a3, .Lload_local
    146  l32i a3, a2, PT_AREG3
    155  rsr a3, sar
    157  s32i a3, a2, PT_SAR
    [all …]
|
/linux-6.1.9/arch/csky/kernel/ |
D | atomic.S |
     18  RD_MEH a3
     19  WR_MEH a3
     21  mfcr a3, epc
     22  addi a3, TRAP0_SIZE
     25  stw a3, (sp, 0)
     26  mfcr a3, epsr
     27  stw a3, (sp, 4)
     28  mfcr a3, usp
     29  stw a3, (sp, 8)
     34  ldex a3, (a2)
    [all …]
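atomic.S saves epc, epsr and usp across the trap and then performs the atomic update with csky's exclusive pair: ldex (line 34) loads the word and a later stex stores it back, failing if anything intervened, in which case the sequence is retried. A rough C analogue of that retry pattern using GCC's atomic builtins, purely for illustration:

    /* Load-exclusive / store-exclusive retry loop, approximated with a CAS. */
    static int atomic_add_return_sketch(int *p, int inc)
    {
        int old, val;

        do {
            old = __atomic_load_n(p, __ATOMIC_RELAXED);          /* ldex a3, (a2) */
            val = old + inc;
        } while (!__atomic_compare_exchange_n(p, &old, val, 0,   /* stex + retry */
                                              __ATOMIC_ACQ_REL, __ATOMIC_RELAXED));
        return val;
    }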
|
/linux-6.1.9/arch/csky/abiv2/ |
D | strcpy.S |
      8  mov a3, a0
     17  stw a2, (a3)
     22  stw a2, (a3, 4)
     27  stw a2, (a3, 8)
     32  stw a2, (a3, 12)
     37  stw a2, (a3, 16)
     42  stw a2, (a3, 20)
     47  stw a2, (a3, 24)
     52  stw a2, (a3, 28)
     54  addi a3, 32
    [all …]
|
D | strcmp.S |
      8  mov a3, a0
     10  xor a2, a3, a1
     18  ldw t0, (a3, 0)
     28  ldw t0, (a3, 4)
     35  ldw t0, (a3, 8)
     42  ldw t0, (a3, 12)
     49  ldw t0, (a3, 16)
     56  ldw t0, (a3, 20)
     63  ldw t0, (a3, 24)
     70  ldw t0, (a3, 28)
    [all …]
|
/linux-6.1.9/drivers/block/ |
D | swim_asm.S |
     61  movel %a6@(0x08), %a3
     62  lea %a3@(read_handshake), %a2
     63  lea %a3@(read_mark), %a3
     68  tstb %a3@(read_error - read_mark)
     69  moveb #0x18, %a3@(write_mode0 - read_mark)
     70  moveb #0x01, %a3@(write_mode1 - read_mark)
     71  moveb #0x01, %a3@(write_mode0 - read_mark)
     72  tstb %a3@(read_error - read_mark)
     73  moveb #0x08, %a3@(write_mode1 - read_mark)
     84  moveb %a3@, %d3
    [all …]
|
/linux-6.1.9/arch/xtensa/include/asm/ |
D | initialize_mmu.h |
     49  movi a3, 0x25 /* For SMP/MX -- internal for writeback,
     53  movi a3, 0x29 /* non-MX -- Most cores use Std Memory
     57  wsr a3, atomctl
     95  srli a3, a0, 27
     96  slli a3, a3, 27
     97  addi a3, a3, CA_BYPASS
     99  wdtlb a3, a7
    100  witlb a3, a7
    196  movi a3, .Lattribute_table
    209  addx4 a9, a8, a3
    [all …]
|
/linux-6.1.9/arch/riscv/lib/ |
D | memset.S |
     16  sltiu a3, a2, 16
     17  bnez a3, 4f
     23  addi a3, t0, SZREG-1
     24  andi a3, a3, ~(SZREG-1)
     25  beq a3, t0, 2f /* Skip if already aligned */
     27  sub a4, a3, t0
     31  bltu t0, a3, 1b
     37  slli a3, a1, 8
     38  or a1, a3, a1
     39  slli a3, a1, 16
    [all …]
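The RISC-V memset rounds the destination up to a register-width boundary with the usual (p + SZREG-1) & ~(SZREG-1) idiom (lines 23-24), byte-fills up to that point, and replicates the fill byte across the register with the same shift-and-OR trick as the Xtensa version. The round-up in C, as a small sketch (SZREG here is just the size of a register, defined for illustration):

    #include <stdint.h>

    #define SZREG sizeof(uintptr_t)    /* register width in bytes, illustrative */

    /* Round p up to the next SZREG boundary: the addi + andi pair above. */
    static uintptr_t round_up_to_reg(uintptr_t p)
    {
        return (p + SZREG - 1) & ~(uintptr_t)(SZREG - 1);
    }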
|
D | memcpy.S |
     15  sltiu a3, a2, 128
     16  bnez a3, 4f
     18  andi a3, t6, SZREG-1
     20  bne a3, a4, 4f
     22  beqz a3, 2f /* Skip if already aligned */
     27  andi a3, a1, ~(SZREG-1)
     28  addi a3, a3, SZREG
     30  sub a4, a3, a1
     36  bltu a1, a3, 1b
     42  add a3, a1, a4
    [all …]
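Before entering its word-copy loop, the RISC-V memcpy checks that source and destination have the same misalignment (lines 18-20 compare the low bits of both pointers); if they differ, no amount of peeling can align both at once, so it falls back to byte copies. The test in C, as a sketch:

    #include <stdbool.h>
    #include <stdint.h>

    /* Word-at-a-time copy is only possible when src and dst agree modulo
     * the register size, so a single alignment pass fixes both.
     */
    static bool co_aligned(const void *dst, const void *src)
    {
        return (((uintptr_t)dst ^ (uintptr_t)src) & (sizeof(uintptr_t) - 1)) == 0;
    }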
|
/linux-6.1.9/arch/riscv/kernel/ |
D | head.S |
    150  la a3, .Lsecondary_park
    151  csrw CSR_TVEC, a3
    158  li a3, SBI_HART_BOOT_STACK_PTR_OFFSET
    159  XIP_FIXUP_OFFSET a3
    160  add a3, a3, a1
    161  REG_L sp, (a3)
    252  la a3, hart_lottery
    254  amoadd.w a3, a2, (a3)
    255  bnez a3, .Lsecondary_start
    259  la a3, hart_lottery
    [all …]
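In the RISC-V head.S the boot "hart lottery" is decided by a single amoadd.w (line 254): every hart atomically adds to hart_lottery, and only the one that reads back zero continues as the boot CPU while the others take the secondary path. The same idea with a standard C atomic fetch-add, as a hedged sketch:

    #include <stdatomic.h>
    #include <stdbool.h>

    static atomic_int hart_lottery_sketch;   /* stand-in for the kernel's hart_lottery */

    /* fetch-add returns the previous value, which is 0 for exactly one caller. */
    static bool won_boot_lottery(void)
    {
        return atomic_fetch_add(&hart_lottery_sketch, 1) == 0;  /* amoadd.w a3, a2, (a3) */
    }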
|
/linux-6.1.9/arch/mips/kernel/ |
D | linux32.c |
     54  unsigned long, __dummy, unsigned long, a2, unsigned long, a3)
     56  return ksys_truncate(path, merge_64(a2, a3));
     60  unsigned long, a2, unsigned long, a3)
     62  return ksys_ftruncate(fd, merge_64(a2, a3));
    102  asmlinkage ssize_t sys32_readahead(int fd, u32 pad0, u64 a2, u64 a3,   in sys32_readahead() argument
    105  return ksys_readahead(fd, merge_64(a2, a3), count);   in sys32_readahead()
    109  unsigned long a2, unsigned long a3,   in sys32_sync_file_range() argument
    114  merge_64(a2, a3), merge_64(a4, a5),   in sys32_sync_file_range()
    119  unsigned long a2, unsigned long a3,   in sys32_fadvise64_64() argument
    124  merge_64(a2, a3), merge_64(a4, a5),   in sys32_fadvise64_64()
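linux32.c implements MIPS compat syscalls whose 64-bit arguments arrive split across pairs of 32-bit registers (with dummy/pad arguments keeping the pairs aligned); merge_64() glues the halves back together before calling the native ksys_* helper. A hedged sketch of the pattern; which register carries the high half depends on the ABI and endianness, and the kernel's merge_64() takes care of that:

    #include <stdint.h>

    /* Rebuild a 64-bit value from two 32-bit halves (illustrative only). */
    static inline uint64_t merge_64_sketch(uint32_t hi, uint32_t lo)
    {
        return ((uint64_t)hi << 32) | lo;
    }

    /* e.g. a compat truncate with its length split into a2/a3 would do:
     *     ksys_truncate(path, merge_64_sketch(a2, a3));
     */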
|