Lines matching references to imm (AArch64 instruction-encoding helpers):

113 				  u32 insn, u64 imm)  in aarch64_insn_encode_immediate()  argument
124 immlo = (imm & ADR_IMM_LOMASK) << ADR_IMM_LOSHIFT; in aarch64_insn_encode_immediate()
125 imm >>= ADR_IMM_HILOSPLIT; in aarch64_insn_encode_immediate()
126 immhi = (imm & ADR_IMM_HIMASK) << ADR_IMM_HISHIFT; in aarch64_insn_encode_immediate()
127 imm = immlo | immhi; in aarch64_insn_encode_immediate()
141 insn |= (imm & mask) << shift; in aarch64_insn_encode_immediate()
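
These lines are the ADR/ADRP special case in aarch64_insn_encode_immediate(): the 21-bit PC-relative immediate is split into a 2-bit low field (insn bits 30:29) and a 19-bit high field (insn bits 23:5) before the generic masked OR at line 141. A minimal standalone sketch of the split, assuming the kernel's constant values (ADR_IMM_HILOSPLIT = 2, ADR_IMM_LOSHIFT = 29, ADR_IMM_HISHIFT = 5) rather than quoting the helper verbatim:

#include <stdint.h>

/* Sketch: place a 21-bit ADR offset into an instruction word.
 * Field layout: immlo = insn[30:29], immhi = insn[23:5]. */
static uint32_t adr_set_imm(uint32_t insn, uint64_t imm)
{
	uint64_t immlo = (imm & 0x3) << 29;           /* offset bits [1:0]  */
	uint64_t immhi = ((imm >> 2) & 0x7ffff) << 5; /* offset bits [20:2] */

	return insn | (uint32_t)(immlo | immhi);
}
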
412 unsigned int imm, in aarch64_insn_gen_load_store_imm() argument
425 if (imm & ~(BIT(12 + shift) - BIT(shift))) { in aarch64_insn_gen_load_store_imm()
426 pr_err("%s: invalid imm: %d\n", __func__, imm); in aarch64_insn_gen_load_store_imm()
430 imm >>= shift; in aarch64_insn_gen_load_store_imm()
454 return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_12, insn, imm); in aarch64_insn_gen_load_store_imm()
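
The test at line 425 is a scaled-range check: BIT(12 + shift) - BIT(shift) is a run of twelve ones starting at bit position shift (the log2 of the access size), so the byte offset must be aligned and, after the imm >>= shift at line 430, must fit the unsigned 12-bit field. The same test as a standalone sketch (is_valid_ldst_offset is a hypothetical name):

#include <stdbool.h>
#include <stdint.h>

static bool is_valid_ldst_offset(uint64_t imm, unsigned int shift)
{
	/* Twelve ones starting at bit position shift: the only bits an
	 * aligned, in-range unsigned offset may have set. */
	uint64_t ok = (1ULL << (12 + shift)) - (1ULL << shift);

	return (imm & ~ok) == 0;
}

For a 64-bit load (shift = 3) this accepts offsets 0 through 32760 in steps of 8.
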
732 int imm, enum aarch64_insn_variant variant, in aarch64_insn_gen_add_sub_imm() argument
767 if (imm & ~(BIT(24) - 1)) in aarch64_insn_gen_add_sub_imm()
771 if (imm & ~(SZ_4K - 1)) { in aarch64_insn_gen_add_sub_imm()
773 if (imm & (SZ_4K - 1)) in aarch64_insn_gen_add_sub_imm()
776 imm >>= 12; in aarch64_insn_gen_add_sub_imm()
784 return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_12, insn, imm); in aarch64_insn_gen_add_sub_imm()
787 pr_err("%s: invalid immediate encoding %d\n", __func__, imm); in aarch64_insn_gen_add_sub_imm()
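
Lines 767-776 cover the two legal ADD/SUB immediate forms: a plain 12-bit value, or a 12-bit value shifted left by 12, so up to 24 bits total with the low 12 bits required to be zero in the shifted form. A sketch of the selection logic under those rules (add_sub_imm_shift is a hypothetical helper):

/* Returns the encoder shift (0 or 12), or -1 if a single ADD/SUB
 * cannot encode the value. */
static int add_sub_imm_shift(int imm)
{
	if (imm & ~((1 << 24) - 1))
		return -1;		/* wider than 24 bits */
	if ((imm & ~(4096 - 1)) == 0)
		return 0;		/* fits the plain 12-bit field */
	if (imm & (4096 - 1))
		return -1;		/* needs both halves at once */
	return 12;			/* multiple of 4K: the LSL #12 form */
}

For example, 0x5000 encodes with shift 12, while 0x1001 cannot be encoded by one instruction and must be split across two.
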
847 int imm, int shift, in aarch64_insn_gen_movewide() argument
868 if (imm & ~(SZ_64K - 1)) { in aarch64_insn_gen_movewide()
869 pr_err("%s: invalid immediate encoding %d\n", __func__, imm); in aarch64_insn_gen_movewide()
898 return aarch64_insn_encode_immediate(AARCH64_INSN_IMM_16, insn, imm); in aarch64_insn_gen_movewide()
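
MOVZ/MOVN/MOVK carry one 16-bit immediate plus a hw field selecting a shift of 0, 16, 32 or 48, which is why line 868 rejects anything wider than SZ_64K - 1. Loading an arbitrary 64-bit constant therefore takes up to four move-wide instructions; a sketch of the chunking (movewide_chunks is a hypothetical name):

#include <stdint.h>

/* Split a 64-bit constant into the four 16-bit pieces that a
 * MOVZ + MOVK + MOVK + MOVK sequence would carry (shift = 16 * i). */
static void movewide_chunks(uint64_t val, uint16_t chunk[4])
{
	for (int i = 0; i < 4; i++)
		chunk[i] = (val >> (16 * i)) & 0xffff;
}
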
1214 s32 imm; in aarch64_get_branch_offset() local
1217 imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_26, insn); in aarch64_get_branch_offset()
1218 return (imm << 6) >> 4; in aarch64_get_branch_offset()
1223 imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_19, insn); in aarch64_get_branch_offset()
1224 return (imm << 13) >> 11; in aarch64_get_branch_offset()
1228 imm = aarch64_insn_decode_immediate(AARCH64_INSN_IMM_14, insn); in aarch64_get_branch_offset()
1229 return (imm << 18) >> 16; in aarch64_get_branch_offset()
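
Each shift pair in aarch64_get_branch_offset() sign-extends the decoded field and scales it by 4 (one instruction word) in a single expression: the left shift parks the field's sign bit at bit 31 of the signed 32-bit imm, and the arithmetic right shift comes back two positions short of where it started. The general pattern as a sketch (branch_offset is a hypothetical name; like the kernel code, it assumes the compiler implements signed right shift arithmetically):

#include <stdint.h>

static int32_t branch_offset(int32_t imm, unsigned int field_bits)
{
	/* field_bits = 26 -> (imm << 6) >> 4
	 * field_bits = 19 -> (imm << 13) >> 11
	 * field_bits = 14 -> (imm << 18) >> 16 */
	return (imm << (32 - field_bits)) >> (32 - field_bits - 2);
}
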
1315 static u32 aarch64_encode_immediate(u64 imm, in aarch64_encode_immediate() argument
1338 if (!imm || imm == mask || imm & ~mask) in aarch64_encode_immediate()
1348 if ((imm & emask) != ((imm >> tmp) & emask)) in aarch64_encode_immediate()
1359 imm &= mask; in aarch64_encode_immediate()
1362 ones = hweight64(imm); in aarch64_encode_immediate()
1373 if (range_of_ones(imm)) { in aarch64_encode_immediate()
1379 ror = __ffs64(imm); in aarch64_encode_immediate()
1388 imm |= ~mask; in aarch64_encode_immediate()
1389 if (!range_of_ones(~imm)) in aarch64_encode_immediate()
1396 ror = fls64(~imm); in aarch64_encode_immediate()
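
Lines 1338-1396 are the core of the logical (bitmask) immediate encoder: reject all-zeroes, all-ones and out-of-range values (1338); halve the candidate element size while both halves of the pattern still match (1348); then derive the rotation from whether the surviving pattern is one contiguous run of ones, or only becomes one after filling the unused top bits and inverting (1373-1396). The run-of-ones test the code leans on can be sketched portably like this (is_range_of_ones stands in for the kernel's range_of_ones() helper):

#include <stdbool.h>
#include <stdint.h>

static bool is_range_of_ones(uint64_t val)
{
	if (!val)
		return false;
	val >>= __builtin_ctzll(val);	/* drop trailing zeroes */
	return (val & (val + 1)) == 0;	/* left with 0..01..1 ? */
}
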
1415 u64 imm) in aarch64_insn_gen_logical_immediate() argument
1439 return aarch64_encode_immediate(imm, variant, insn); in aarch64_insn_gen_logical_immediate()
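
The wrapper at line 1415 only picks the opcode and registers before delegating all immediate legality checking to aarch64_encode_immediate() at line 1439; an unencodable value comes back as AARCH64_BREAK_FAULT rather than a valid instruction. Some worked values, assuming the standard bitmask-immediate rules:

#include <stdint.h>

static const uint64_t enc_a = 0x00ff00ff00ff00ffULL; /* repeating 16-bit element 0x00ff: encodable */
static const uint64_t enc_b = 0x0000000ffffffff0ULL; /* one rotated run of 32 ones: encodable */
static const uint64_t bad_c = 0x0000000000001234ULL; /* ones form no single rotated run: rejected */
static const uint64_t bad_d = 0xffffffffffffffffULL; /* all-ones: rejected at line 1338 */
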