/linux/arch/arm/mach-at91/

  pm_suspend.S
    134  bic tmp1, tmp1, #0x1
    138  bic tmp1, tmp1, #0x1
    142  bic tmp1, tmp1, #0x1
    146  bic tmp1, tmp1, #0x1
    150  bic tmp1, tmp1, #0x1
    291  orr tmp1, tmp1, #0x1
    295  orr tmp1, tmp1, #0x1
    299  orr tmp1, tmp1, #0x1
    303  orr tmp1, tmp1, #0x1
    307  orr tmp1, tmp1, #0x1
    [all …]

/linux/arch/arm64/include/asm/

  asm_pointer_auth.h
    13  mov \tmp1, #THREAD_KEYS_KERNEL
    14  add \tmp1, \tsk, \tmp1
    15  ldp \tmp2, \tmp3, [\tmp1, #PTRAUTH_KERNEL_KEY_APIA]
    53  mov \tmp1, #THREAD_KEYS_USER
    54  add \tmp1, \tsk, \tmp1
    55  ldp \tmp2, \tmp3, [\tmp1, #PTRAUTH_USER_KEY_APIA]
    61  mrs \tmp1, id_aa64isar1_el1
    62  ubfx \tmp1, \tmp1, #ID_AA64ISAR1_APA_SHIFT, #8
    63  cbz \tmp1, .Lno_addr_auth\@
    64  mov_q \tmp1, (SCTLR_ELx_ENIA | SCTLR_ELx_ENIB | \
    [all …]

  asm-uaccess.h
    16  .macro __uaccess_ttbr0_disable, tmp1
    17  mrs \tmp1, ttbr1_el1 // swapper_pg_dir
    18  bic \tmp1, \tmp1, #TTBR_ASID_MASK
    19  sub \tmp1, \tmp1, #RESERVED_SWAPPER_OFFSET // reserved_pg_dir
    22  add \tmp1, \tmp1, #RESERVED_SWAPPER_OFFSET
    27  .macro __uaccess_ttbr0_enable, tmp1, tmp2
    28  get_current_task \tmp1
    29  ldr \tmp1, [\tmp1, #TSK_TI_TTBR0] // load saved TTBR0_EL1
    31  extr \tmp2, \tmp2, \tmp1, #48
    42  __uaccess_ttbr0_disable \tmp1
    [all …]

/linux/arch/sparc/include/asm/

  head_64.h
    37  lduw [%tmp1 + %lo(is_sun4v)], %tmp1; \
    42  rdpr %ver, %tmp1; \
    44  srlx %tmp1, 32, %tmp1; \
    46  cmp %tmp1, %tmp2; \
    51  rdpr %ver, %tmp1; \
    53  srlx %tmp1, 32, %tmp1; \
    55  cmp %tmp1, %tmp2; \
    60  rdpr %ver, %tmp1; \
    64  sllx %tmp1, 16, %tmp1; \
    71  rdpr %ver, %tmp1; \
    [all …]

/linux/arch/arm/mach-tegra/

  sleep.h
    82  mrc p15, 0, \tmp1, c0, c0, 0
    83  ubfx \tmp1, \tmp1, #4, #12
    85  cmp \tmp1, \tmp2
    89  .macro exit_smp, tmp1, tmp2
    91  bic \tmp1, \tmp1, #(1<<6) | (1<<0) @ clear ACTLR.SMP | ACTLR.FW
    97  andeq \tmp1, \tmp1, #0xF
    98  moveq \tmp1, \tmp1, lsl #2
    110  mov32 \tmp1, \base
    111  ldr \tmp1, [\tmp1, #APB_MISC_GP_HIDREV]
    112  and \tmp1, \tmp1, #0xff00
    [all …]
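
The first three sleep.h hits read MIDR (mrc p15, 0, rX, c0, c0, 0) and extract bits [15:4], the primary part number, before comparing it with an expected CPU part. A minimal C sketch of that field extraction, with a helper name of our own choosing:

    #include <stdint.h>

    /* Illustration only: bits [15:4] of MIDR are the primary part number,
     * which is what "ubfx \tmp1, \tmp1, #4, #12" extracts above. */
    static inline uint32_t midr_part_number(uint32_t midr)
    {
        return (midr >> 4) & 0xfff;
    }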

/linux/arch/arm64/lib/

  strlen.S
    30  #define tmp1 x4 macro
    109  clz tmp1, has_nul1
    126  orr tmp2, tmp1, tmp3
    132  orr tmp2, tmp1, tmp3
    163  clz tmp1, has_nul1
    195  lsl tmp1, srcin, 3
    199  lsr tmp1, tmp4, tmp1 /* Shift (tmp1 & 63). */
    202  lsl tmp1, tmp4, tmp1 /* Shift (tmp1 & 63). */
    204  orr tmp1, tmp1, REP8_80
    205  orn data1, data1, tmp1
    [all …]

  csum.c
    63  __uint128_t tmp1, tmp2, tmp3, tmp4; in do_csum() local
    65  tmp1 = *(__uint128_t *)ptr; in do_csum()
    74  tmp1 += (tmp1 >> 64) | (tmp1 << 64); in do_csum()
    78  tmp1 = ((tmp1 >> 64) << 64) | (tmp2 >> 64); in do_csum()
    79  tmp1 += (tmp1 >> 64) | (tmp1 << 64); in do_csum()
    82  tmp1 = ((tmp1 >> 64) << 64) | (tmp3 >> 64); in do_csum()
    83  tmp1 += (tmp1 >> 64) | (tmp1 << 64); in do_csum()
    84  tmp1 = ((tmp1 >> 64) << 64) | sum64; in do_csum()
    85  tmp1 += (tmp1 >> 64) | (tmp1 << 64); in do_csum()
    86  sum64 = tmp1 >> 64; in do_csum()
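
The do_csum() lines above fold a 128-bit accumulator by adding it to a copy with its 64-bit halves swapped, so any carry lands back in the sum (end-around carry) before the high half is taken as the new total. A standalone sketch of the same ones'-complement fold, reducing a 64-bit accumulator to 16 bits; this shows the generic technique, not a transcription of the kernel's do_csum():

    #include <stdint.h>

    /* Fold a 64-bit ones'-complement accumulator to 16 bits by repeatedly
     * adding the high part back into the low part (end-around carry). */
    static uint16_t csum_fold64(uint64_t sum)
    {
        sum = (sum & 0xffffffffULL) + (sum >> 32);
        sum = (sum & 0xffffffffULL) + (sum >> 32);  /* fold the carry from above */
        sum = (sum & 0xffffULL) + (sum >> 16);
        sum = (sum & 0xffffULL) + (sum >> 16);
        return (uint16_t)sum;
    }

The Internet checksum placed on the wire is the bitwise complement of this folded value.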

  strcmp.S
    36  #define tmp1 x7 macro
    45  eor tmp1, src1, src2
    47  tst tmp1, #7
    49  ands tmp1, src1, #7
    58  sub tmp1, data1, zeroones
    98  sub tmp1, tmp3, zeroones
    100  bic has_nul, tmp1, tmp2
    123  lsl tmp1, tmp1, #3 /* Bytes beyond alignment -> bits. */
    125  neg tmp1, tmp1 /* Bits to alignment -64. */
    157  and tmp1, src2, #0xff8
    [all …]

  strnlen.S
    38  tmp1 .req x8 label
    54  ands tmp1, srcin, #15
    74  sub tmp1, data1, zeroones
    78  bic has_nul1, tmp1, tmp2
    81  orr tmp1, has_nul1, has_nul2
    82  ccmp tmp1, #0, #0, pl /* NZCV = 0000 */
    106  CPU_BE( sub tmp1, data2, zeroones )
    108  CPU_BE( bic has_nul2, tmp1, tmp2 )
    135  add tmp3, tmp3, tmp1
    138  neg tmp4, tmp1
    [all …]
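
strlen.S, strcmp.S and strnlen.S above all locate NUL bytes a word at a time with the same idiom: subtract the repeated-0x01 constant (zeroones) from the data word and clear every bit that is set in data | 0x7f..7f, which is what the "sub tmp1, data1, zeroones" / "bic has_nul, tmp1, tmp2" pairs compute. A hedged C version of the trick (standalone, not the kernel's implementation):

    #include <stdint.h>

    #define REP8_01 0x0101010101010101ULL   /* "zeroones" in the assembly */
    #define REP8_7F 0x7f7f7f7f7f7f7f7fULL

    /* Nonzero iff 'word' contains a zero byte:
     * (word - 0x01..01) & ~(word | 0x7f..7f). */
    static inline uint64_t has_zero_byte(uint64_t word)
    {
        return (word - REP8_01) & ~(word | REP8_7F);
    }

With a little-endian load, the lowest set bit of the result falls in the first NUL byte, so (for example) the GCC/Clang builtin __builtin_ctzll(has_zero_byte(w)) / 8 gives its offset within the word.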

  memcpy.S
    45  #define tmp1 x14 macro
    102  lsr tmp1, count, 1
    105  ldrb B_lw, [src, tmp1]
    107  strb B_lw, [dstin, tmp1]
    151  sub tmp1, dstin, src
    152  cbz tmp1, L(copy0)
    153  cmp tmp1, count
    159  and tmp1, dstin, 15
    161  sub src, src, tmp1
    205  and tmp1, dstend, 15
    [all …]

/linux/arch/mips/crypto/

  poly1305-mips.pl
    175  dsll $tmp1,24
    192  or $in0,$tmp1
    198  or $in0,$tmp1
    333  dsll $tmp1,24
    350  or $in0,$tmp1
    368  sltu $tmp1,$d0,$tmp1
    470  daddu $tmp1,$tmp1,$in1
    679  sll $tmp1,$tmp1,8
    697  sll $tmp1,$tmp1,8
    1178  addu $tmp1,$tmp1,$ctx
    [all …]

/linux/tools/testing/selftests/bpf/prog_tests/

  mmap.c
    57  munmap(tmp1, page_size); in test_mmap()
    183  munmap(tmp1, map_sz); in test_mmap()
    208  if (CHECK(tmp0 != tmp1, "adv_mmap1", "tmp0: %p, tmp1: %p\n", tmp0, tmp1)) { in test_mmap()
    214  err = munmap(tmp1 + page_size, page_size); in test_mmap()
    216  munmap(tmp1, 4 * page_size); in test_mmap()
    224  munmap(tmp1, page_size); in test_mmap()
    229  "tmp1: %p, tmp2: %p\n", tmp1, tmp2); in test_mmap()
    238  CHECK(tmp1 != tmp2, "adv_mmap6", "tmp1: %p, tmp2: %p\n", tmp1, tmp2); in test_mmap()
    256  munmap(tmp1, 4 * page_size); in test_mmap()
    274  munmap(tmp1, map_sz); in test_mmap()
    [all …]
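
The mmap.c selftest repeatedly calls munmap() on sub-ranges of a larger mapping, for example a single page at tmp1 + page_size out of a four-page area. A minimal standalone sketch of that POSIX partial-unmap pattern, using anonymous memory rather than the BPF array map the test actually maps:

    #include <sys/mman.h>
    #include <unistd.h>

    int main(void)
    {
        long page_size = sysconf(_SC_PAGE_SIZE);
        char *tmp1 = mmap(NULL, 4 * page_size, PROT_READ | PROT_WRITE,
                          MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);

        if (tmp1 == MAP_FAILED)
            return 1;
        if (munmap(tmp1 + page_size, page_size))  /* punch out page 1 only */
            return 1;
        tmp1[0] = 1;                              /* page 0 is still mapped */
        munmap(tmp1, 4 * page_size);              /* a range covering the hole is fine */
        return 0;
    }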

/linux/arch/s390/lib/

  uaccess.c
    64  unsigned long tmp1, tmp2; in copy_from_user_mvcos() local
    66  tmp1 = -4096UL; in copy_from_user_mvcos()
    95  unsigned long tmp1, tmp2; in copy_from_user_mvcp() local
    97  tmp1 = -256UL; in copy_from_user_mvcp()
    137  unsigned long tmp1, tmp2; in copy_to_user_mvcos() local
    139  tmp1 = -4096UL; in copy_to_user_mvcos()
    168  unsigned long tmp1, tmp2; in copy_to_user_mvcs() local
    170  tmp1 = -256UL; in copy_to_user_mvcs()
    209  unsigned long tmp1, tmp2; in clear_user_mvcos() local
    211  tmp1 = -4096UL; in clear_user_mvcos()
    [all …]

/linux/tools/lib/

  rbtree.c
    259  sibling = tmp1; in ____rb_erase_color()
    262  if (!tmp1 || rb_is_black(tmp1)) { in ____rb_erase_color()
    319  tmp1 = tmp2->rb_right; in ____rb_erase_color()
    323  if (tmp1) in ____rb_erase_color()
    327  tmp1 = sibling; in ____rb_erase_color()
    363  sibling = tmp1; in ____rb_erase_color()
    365  tmp1 = sibling->rb_left; in ____rb_erase_color()
    366  if (!tmp1 || rb_is_black(tmp1)) { in ____rb_erase_color()
    383  tmp1 = tmp2->rb_left; in ____rb_erase_color()
    387  if (tmp1) in ____rb_erase_color()
    [all …]

/linux/lib/

  rbtree.c
    259  sibling = tmp1; in ____rb_erase_color()
    262  if (!tmp1 || rb_is_black(tmp1)) { in ____rb_erase_color()
    319  tmp1 = tmp2->rb_right; in ____rb_erase_color()
    323  if (tmp1) in ____rb_erase_color()
    327  tmp1 = sibling; in ____rb_erase_color()
    363  sibling = tmp1; in ____rb_erase_color()
    365  tmp1 = sibling->rb_left; in ____rb_erase_color()
    366  if (!tmp1 || rb_is_black(tmp1)) { in ____rb_erase_color()
    383  tmp1 = tmp2->rb_left; in ____rb_erase_color()
    387  if (tmp1) in ____rb_erase_color()
    [all …]

/linux/arch/ia64/lib/

  do_csum.S
    105  #define tmp1 r26 macro
    161  and tmp1=7, tmp1 // make sure that if tmp1==8 -> tmp1=0
    165  shl tmp1=tmp1,3 // number of bits
    263  zxt4 tmp1=result1[0]
    266  add result1[0]=tmp1,tmp2
    268  and tmp1=result1[0],tmp3
    271  add result1[0]=tmp1,tmp2
    273  and tmp1=result1[0],tmp3
    276  add result1[0]=tmp1,tmp2
    278  and tmp1=result1[0],tmp3
    [all …]

/linux/arch/arc/include/asm/

  uaccess.h
    172  unsigned long tmp1, tmp2, tmp3, tmp4; in raw_copy_from_user() local
    271  "=r"(tmp1), "=r"(tmp2) in raw_copy_from_user()
    291  : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1) in raw_copy_from_user()
    311  : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1) in raw_copy_from_user()
    329  : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1) in raw_copy_from_user()
    386  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4) in raw_copy_from_user()
    399  unsigned long tmp1, tmp2, tmp3, tmp4; in raw_copy_to_user() local
    493  "=r"(tmp1), "=r"(tmp2) in raw_copy_to_user()
    513  : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1) in raw_copy_to_user()
    533  : "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1) in raw_copy_to_user()
    [all …]

/linux/arch/x86/crypto/

  curve25519-x86_64.c
    773  u64 *a = tmp1; in point_add_and_double()
    775  u64 *ab = tmp1; in point_add_and_double()
    798  a1 = tmp1; in point_add_and_double()
    799  b1 = tmp1 + (u32)4U; in point_add_and_double()
    800  d = tmp1 + (u32)8U; in point_add_and_double()
    801  c = tmp1 + (u32)12U; in point_add_and_double()
    802  ab1 = tmp1; in point_add_and_double()
    821  u64 *a = tmp1; in point_double()
    825  u64 *ab = tmp1; in point_double()
    857  u64 *tmp1; in montgomery_ladder() local
    [all …]

/linux/arch/arm/include/asm/

  tls.h
    10  .macro switch_tls_none, base, tp, tpuser, tmp1, tmp2
    13  .macro switch_tls_v6k, base, tp, tpuser, tmp1, tmp2
    20  .macro switch_tls_v6, base, tp, tpuser, tmp1, tmp2
    21  ldr \tmp1, =elf_hwcap
    22  ldr \tmp1, [\tmp1, #0]
    24  tst \tmp1, #HWCAP_TLS @ hardware TLS available?
    32  .macro switch_tls_software, base, tp, tpuser, tmp1, tmp2
    33  mov \tmp1, #0xffff0fff
    34  str \tp, [\tmp1, #-15] @ set TLS value at 0xffff0ff0

/linux/arch/alpha/kernel/

  traps.c
    438  long error, tmp1, tmp2, tmp3, tmp4; in do_entUna() local
    461  : "=r"(error), "=&r"(tmp1), "=&r"(tmp2) in do_entUna()
    465  una_reg(reg) = tmp1|tmp2; in do_entUna()
    477  : "=r"(error), "=&r"(tmp1), "=&r"(tmp2) in do_entUna()
    481  una_reg(reg) = (int)(tmp1|tmp2); in do_entUna()
    497  una_reg(reg) = tmp1|tmp2; in do_entUna()
    725  unsigned long tmp1, tmp2, tmp3, tmp4; in do_entUnaUser() local
    788  *reg_addr = tmp1|tmp2; in do_entUnaUser()
    820  alpha_write_fp_reg(reg, tmp1|tmp2); in do_entUnaUser()
    836  *reg_addr = (int)(tmp1|tmp2); in do_entUnaUser()
    [all …]

/linux/arch/alpha/lib/

  divide.S
    58  #define tmp1 $3 macro
    110  stq tmp1,24($30)
    142  subq modulus,divisor,tmp1
    145  cmovne compare,tmp1,modulus
    151  ldq tmp1,24($30)
    184  stq tmp1,24($30)
    191  subq $31,$27,tmp1
    194  cmovlt $28,tmp1,$27
    195  ldq tmp1,24($30)
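
divide.S exists because Alpha has no integer divide instruction; the "subq modulus,divisor,tmp1" / "cmovne compare,tmp1,modulus" pair above is a conditional-subtract step of the software division loop. A hedged C sketch of generic restoring (shift-and-subtract) division, not a transcription of the assembly:

    #include <stdint.h>

    /* Unsigned shift-and-subtract division: one conditional subtract per
     * quotient bit.  Assumes divisor != 0; the 64-bit remainder gives the
     * shift step headroom for 32-bit operands. */
    static uint32_t udiv32(uint32_t dividend, uint32_t divisor, uint32_t *remainder)
    {
        uint64_t rem = 0;
        uint32_t quotient = 0;

        for (int bit = 31; bit >= 0; bit--) {
            rem = (rem << 1) | ((dividend >> bit) & 1);
            if (rem >= divisor) {            /* the conditional subtract */
                rem -= divisor;
                quotient |= 1u << bit;
            }
        }
        *remainder = (uint32_t)rem;
        return quotient;
    }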

/linux/arch/m68k/lib/

  checksum.c
    40  unsigned long tmp1, tmp2; in csum_partial() local
    118  "=&d" (tmp1), "=&d" (tmp2) in csum_partial()
    139  unsigned long tmp1, tmp2; in csum_and_copy_from_user() local
    261  "=&d" (tmp1), "=d" (tmp2) in csum_and_copy_from_user()
    278  unsigned long tmp1, tmp2; in csum_partial_copy_nocheck() local
    366  "=&d" (tmp1), "=&d" (tmp2) in csum_partial_copy_nocheck()

/linux/arch/arm/mach-iop32x/include/mach/

  entry-macro.S
    26  .macro arch_ret_to_user, tmp1, tmp2
    27  mrc p15, 0, \tmp1, c15, c1, 0
    28  ands \tmp2, \tmp1, #(1 << 6)
    29  bicne \tmp1, \tmp1, #(1 << 6)
    30  mcrne p15, 0, \tmp1, c15, c1, 0 @ Disable cp6 access

/linux/fs/nfsd/

  nfsxdr.c
    133  u32 tmp1, tmp2; in svcxdr_decode_sattr() local
    146  tmp1 = be32_to_cpup(p++); in svcxdr_decode_sattr()
    147  if (tmp1 != (u32)-1 && tmp1 != 0xffff) { in svcxdr_decode_sattr()
    149  iap->ia_mode = tmp1; in svcxdr_decode_sattr()
    152  tmp1 = be32_to_cpup(p++); in svcxdr_decode_sattr()
    153  if (tmp1 != (u32)-1) { in svcxdr_decode_sattr()
    159  tmp1 = be32_to_cpup(p++); in svcxdr_decode_sattr()
    160  if (tmp1 != (u32)-1) { in svcxdr_decode_sattr()
    166  tmp1 = be32_to_cpup(p++); in svcxdr_decode_sattr()
    167  if (tmp1 != (u32)-1) { in svcxdr_decode_sattr()
    [all …]
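
svcxdr_decode_sattr() above compares each decoded word against (u32)-1 (and, for the mode, also 0xffff): in the NFSv2 sattr encoding an all-ones value means "leave this attribute unchanged", so only other values set the corresponding validity bit. A simplified sketch of that decode pattern with made-up structure and flag names (the real code uses be32_to_cpup() and the kernel's ATTR_* flags):

    #include <stdint.h>

    #define SET_MODE 0x1u
    #define SET_UID  0x2u

    struct fake_attrs {
        unsigned int valid;     /* which fields below should be applied */
        uint32_t mode;
        uint32_t uid;
    };

    /* Hypothetical decoder: an all-ones word is the "no change" sentinel. */
    static const uint32_t *decode_sattr(const uint32_t *p, struct fake_attrs *a)
    {
        uint32_t tmp1;

        a->valid = 0;

        tmp1 = *p++;
        if (tmp1 != (uint32_t)-1) {
            a->valid |= SET_MODE;
            a->mode = tmp1;
        }

        tmp1 = *p++;
        if (tmp1 != (uint32_t)-1) {
            a->valid |= SET_UID;
            a->uid = tmp1;
        }
        return p;
    }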

/linux/arch/arm64/kernel/

  head.S
    138  add \tmp1, \tbl, #PAGE_SIZE
    139  phys_to_pte \tmp2, \tmp1
    141  lsr \tmp1, \virt, #\shift
    143  and \tmp1, \tmp1, \ptrs // table index
    166  orr \tmp1, \tmp1, \flags // tmp1 = table entry
    405  ldr \tmp1, [\tsk, #TSK_STACK]
    406  add sp, \tmp1, #THREAD_SIZE
    414  adr_l \tmp1, __per_cpu_offset
    416  ldr \tmp1, [\tmp1, \tmp2, lsl #3]
    417  set_this_cpu_offset \tmp1
    [all …]