Searched refs:CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS (Results 1 – 25 of 48) sorted by relevance
21 if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) { in __crypto_xor()
42 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) { in __crypto_xor()
56 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) { in __crypto_xor()
70 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) { in __crypto_xor()
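
The __crypto_xor() hits above all follow the same IS_ENABLED() pattern: take the word-at-a-time path only when unaligned word accesses are cheap (or the buffers happen to be aligned), otherwise fall back to a byte loop. A minimal standalone sketch of that idiom, not the kernel's actual implementation; HAVE_EFFICIENT_UNALIGNED_ACCESS here is a local stand-in for the Kconfig symbol:

    #include <stddef.h>
    #include <stdint.h>
    #include <string.h>

    /* Stand-in for IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS). */
    #define HAVE_EFFICIENT_UNALIGNED_ACCESS 0

    static void xor_buf(unsigned char *dst, const unsigned char *src, size_t len)
    {
    	int misaligned = ((uintptr_t)dst | (uintptr_t)src) % sizeof(unsigned long);

    	if (HAVE_EFFICIENT_UNALIGNED_ACCESS || !misaligned) {
    		/* Fast path: XOR one unsigned long at a time. */
    		while (len >= sizeof(unsigned long)) {
    			unsigned long d, s;

    			/* memcpy keeps the loads/stores well-defined even if unaligned. */
    			memcpy(&d, dst, sizeof(d));
    			memcpy(&s, src, sizeof(s));
    			d ^= s;
    			memcpy(dst, &d, sizeof(d));
    			dst += sizeof(d);
    			src += sizeof(s);
    			len -= sizeof(d);
    		}
    	}

    	/* Tail bytes, or the whole buffer on the slow path. */
    	while (len--)
    		*dst++ ^= *src++;
    }
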
72 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in __crypto_memneq_generic()
97 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in __crypto_memneq_16()
89 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in is_link_local_ether_addr()
107 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in is_zero_ether_addr()
125 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in is_multicast_ether_addr()
139 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in is_multicast_ether_addr_64bits()
297 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in ether_addr_copy()
348 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in ether_addr_equal()
377 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ether_addr_equal_64bits()
401 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in ether_addr_equal_unaligned()
552 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in compare_ether_header()
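
These etherdevice helpers compile to a couple of wide compares when unaligned loads are cheap, and to narrower compares otherwise. A hedged standalone sketch of the same trick for a 6-byte MAC address; the FAST_UNALIGNED macro and function name are illustrative, not the kernel's:

    #include <stdbool.h>
    #include <stdint.h>
    #include <string.h>

    /* Stand-in for CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS. */
    #define FAST_UNALIGNED 1

    /* Compare two 6-byte MAC addresses. */
    static bool mac_equal(const uint8_t *a, const uint8_t *b)
    {
    #if FAST_UNALIGNED
    	/* One 32-bit and one 16-bit compare; loading via memcpy stays
    	 * legal even when a/b are not naturally aligned. */
    	uint32_t a32, b32;
    	uint16_t a16, b16;

    	memcpy(&a32, a, 4);
    	memcpy(&b32, b, 4);
    	memcpy(&a16, a + 4, 2);
    	memcpy(&b16, b + 4, 2);
    	return ((a32 ^ b32) | (a16 ^ b16)) == 0;
    #else
    	/* Byte-wise fallback for architectures that trap or are slow on
    	 * unaligned accesses. */
    	return memcmp(a, b, 6) == 0;
    #endif
    }
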
84 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) || in siphash()
134 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) || in hsiphash()
27 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in nf_inet_addr_cmp()
44 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in nf_inet_addr_mask()
10 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in flat_get_addr_from_rp()
19 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in flat_put_addr_at_rp()
134 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
223 so the code can be made dependent on CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS, like this::
225 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
95 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in lzo1x_decompress_safe()
213 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in lzo1x_decompress_safe()
255 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in lzo1x_decompress_safe()
54 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && \ in lzo1x_1_do_compress()
164 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && defined(LZO_USE_CTZ64) in lzo1x_1_do_compress()
184 #elif defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && defined(LZO_USE_CTZ32) in lzo1x_1_do_compress()
558 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_masked_addr_cmp()
605 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in __ipv6_addr_set_half()
633 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_equal()
646 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64
695 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_any()
707 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_hash()
731 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_loopback()
748 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_v4mapped()
837 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64
857 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in __ipv6_addr_diff()
448 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_is_ll_all_nodes()
460 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_is_ll_all_routers()
477 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_is_solict_mult()
492 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in ipv6_addr_is_all_snoopers()
27 if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) in copy_from_kernel_nofault()
61 if (!IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) in copy_to_kernel_nofault()
154 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && in crypto_xor()
174 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && in crypto_xor_cpy()
47 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
15 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
45 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
246 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
408 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
184 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in strscpy()
760 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in memcmp()
38 #ifndef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in is_aligned()
47 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS in needs_unaligned_copy()
34 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) in efx_rx_buf_hash()
481 if (IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS)) \
494 type __val = IS_ENABLED(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) \
80 unaligned accesses (CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS=y). One could
272 #if defined(CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS) && BITS_PER_LONG == 64 in rds_addr_cmp()
155 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
259 made dependent on CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS like so::
261 #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
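
The documentation hits (the English original and its Chinese translation above) describe the portable pattern: only dereference potentially unaligned pointers directly when CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS is set, and go through the get_unaligned()/put_unaligned() helpers otherwise. A hedged sketch of what that looks like in kernel code; the function and field names are made up for illustration and this is not the documentation's own example:

    #include <linux/types.h>
    #include <asm/unaligned.h>	/* get_unaligned_be32(); <linux/unaligned.h> in newer trees */

    /* Illustrative only: read a 32-bit big-endian length field that may sit
     * at an unaligned offset inside a received packet. */
    static u32 example_read_len(const void *hdr)
    {
    #ifdef CONFIG_HAVE_EFFICIENT_UNALIGNED_ACCESS
    	/* The CPU handles unaligned loads cheaply; a plain load is fine. */
    	return be32_to_cpu(*(const __be32 *)hdr);
    #else
    	/* Portable path: get_unaligned_be32() assembles the value safely
    	 * on architectures that trap or emulate unaligned accesses. */
    	return get_unaligned_be32(hdr);
    #endif
    }

In practice get_unaligned_be32() already collapses to a plain load on architectures where unaligned access is efficient, so the #ifdef only pays off when the two branches differ algorithmically, as in the ether_addr_equal() and __crypto_xor() hits earlier in this listing.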