/sysdeps/unix/sysv/linux/powerpc/powerpc32/

  clone.S  (argument-check and syscall-error patterns sketched after this section)
    34  cmpwi cr0,r4,0
    36  cror cr0*4+eq,cr1*4+eq,cr0*4+eq
    37  beq- cr0,L(badargs)
    72  crandc cr1*4+eq,cr1*4+eq,cr0*4+so

  swapcontext.S
    38  bso- cr0,1f

  setcontext.S
    40  bso- cr0,1f

  getcontext.S
    39  bso- cr0,1f

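The powerpc32 Linux entries above show the two most common uses of cr0 in this tree: the kernel reports a failed syscall by setting the summary-overflow bit of cr0 (hence the `bso- cr0,1f` tests in the *context.S files), and clone.S folds its NULL checks into cr0 with cror before branching to L(badargs); the crandc at line 72 then computes cr1.EQ &= ~cr0.SO, folding the syscall error flag into the cr1 test. Below is a minimal sketch of both patterns, assuming the cr0/cr1 field macros and the eq/so bit macros from sysdeps/powerpc/sysdep.h; the register choices and the labels L(badargs)/L(error) are illustrative, not the files' actual code:

        /* clone-style argument check: fail if fn == 0 || stack == 0.  */
        cmpwi   cr0,r3,0                      /* cr0.EQ = (fn == 0) */
        cmpwi   cr1,r4,0                      /* cr1.EQ = (stack == 0) */
        cror    cr0*4+eq,cr1*4+eq,cr0*4+eq    /* cr0.EQ |= cr1.EQ */
        beq-    cr0,L(badargs)

        /* Linux syscall error check: the kernel sets cr0.SO on failure.  */
        sc
        bso-    cr0,L(error)                  /* branch if cr0.SO is set */
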
/sysdeps/powerpc/powerpc64/le/power10/

  memcmp.S
    86  crnand 4*cr0+lt,4*cr0+gt,4*cr6+eq
    87  bt 4*cr0+lt,L(tail1)
    96  crnand 4*cr0+lt,4*cr0+gt,4*cr6+eq
    97  bt 4*cr0+lt,L(tail2)
    106  crnand 4*cr0+lt,4*cr0+gt,4*cr6+eq
    107  bt 4*cr0+lt,L(tail3)

/sysdeps/unix/sysv/linux/powerpc/powerpc64/

  clone.S
    35  cmpdi cr0,r4,0
    37  cror cr0*4+eq,cr1*4+eq,cr0*4+eq
    38  beq- cr0,L(badargs)
    99  crandc cr1*4+eq,cr1*4+eq,cr0*4+so

/sysdeps/powerpc/powerpc64/power8/

  strcmp.S  (see the cmpb/cmpdi scan sketch after this section)
    59  bne cr0,L(different_nocmpb)
    66  bne cr0,L(different_nocmpb)
    83  bne cr0,L(loop_diff_align)
    93  bne cr0,L(different_nocmpb)
    102  bne cr0,L(different_nocmpb)
    109  bne cr0,L(different_nocmpb)
    116  bne cr0,L(different_nocmpb)
    219  bne cr0,L(different_nocmpb)

  strncmp.S
    38  beq cr0,L(ret0)
    61  bne cr0,L(different1)
    75  bne cr0,L(different0)
    98  beq cr0,L(loop_eq_align_0)
    113  bne cr0,L(different1)
    271  bne cr0,L(different1)
    293  beq cr0,L(loop_eq_align_1)

  strncpy.S
    129  cmpdi cr0,r5,1
    132  beq cr0,L(short_path_loop_end_0)
    189  bne cr0,L(short_path_loop) /* Check if the total number of writes
    385  bne cr0,L(short_path_prepare_2_3)
    412  bne cr0,L(short_path_prepare_2_2)
    428  bne cr0,L(short_path_2)

  memchr.S
    57  beq cr0, L(align_qw)
    72  beq cr0, L(align_qw)
    100  beq cr0, L(preloop_64B)
    111  beq cr0, L(preloop_64B)
    120  beq cr0, L(preloop_64B)

  strspn.S
    94  cmpldi cr0, r9, 0
    104  beq cr0, L(start_cmp)

  memrchr.S
    63  beq cr0, L(align_qw)
    97  beq cr0, L(preloop_64B)
    108  beq cr0, L(preloop_64B)
    117  beq cr0, L(preloop_64B)

  strnlen.S
    81  bne cr0, L(early_find)  /* If found null bytes. */
    114  beq cr0,L(preloop_64B)  /* If it is already 64B aligned. */
    123  beq cr0,L(preloop_64B)
    131  beq cr0,L(preloop_64B)

  strlen.S
    106  beq cr0,L(preloop)
    117  beq cr0,L(preloop)
    128  beq cr0,L(preloop)

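In the power8 string routines above, cr0 carries the per-iteration decision: a doubleword (or a cmpb byte mask derived from it) is compared against zero and the code branches once a NUL byte or a difference has been found, or once an address is suitably aligned. A minimal, hypothetical NUL scan in that style, assuming r3 holds an 8-byte-aligned string pointer; the registers and the L(found) label are illustrative, not the routines' actual code:

        li      r0,0                  /* all-zero pattern for cmpb */
L(loop):
        ld      r4,0(r3)              /* load 8 bytes of the string */
        cmpb    r5,r4,r0              /* 0xff in each byte of r5 where r4 had 0x00 */
        cmpdi   cr0,r5,0              /* any NUL byte in this doubleword?  */
        bne     cr0,L(found)          /* cr0.EQ clear -> a NUL byte was found */
        addi    r3,r3,8
        b       L(loop)
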
/sysdeps/powerpc/powerpc64/multiarch/

  memcpy-power8-cached.S
    33  beq cr0,1f
    40  beq cr0,2f
    47  beq cr0,3f
    54  beqlr cr0
    69  beq cr0,L(dst_is_align_16)

/sysdeps/powerpc/powerpc64/le/power9/

  strncmp.S
    72  cmpdi cr0, r5, 0
    73  beq cr0, L(ret0)
    98  bne cr0, L(different1)
    112  bne cr0, L(different1)
    127  bne cr0, L(different1)
    138  bne cr0, L(different1)
    154  beq cr0, L(aligned)
    159  beq cr0, L(s1_align)
    179  beq cr0, L(aligned)

  strcmp.S
    92  bne cr0, L(different_nocmpb)
    99  bne cr0, L(different_nocmpb)
    111  beq cr0, L(aligned)
    113  beq cr0, L(s1_align)
    131  beq cr0, L(aligned)

/sysdeps/powerpc/powerpc32/

  lshift.S
    27  cmplwi cr0,r5,16  # is size < 16
    34  bge cr0,L(big)  # branch if size >= 16
    63  bltl- cr0,L(boom)  # Never taken, only used to set LR.

/sysdeps/powerpc/powerpc64/a2/

  memcpy.S
    64  cmpldi cr0,r8,0  /* Were we aligned on a 16 byte bdy? */
    123  cmpdi cr0,r9,0  /* Cache line size set? */
    124  bne+ cr0,L(cachelineset)
    156  cmpdi cr0,r9,128
    162  beq- cr0,L(big_lines)  /* 128 byte line code */
    307  cmpldi cr0,r5,16
    321  cmpldi cr0,r5,0  /* copy remaining bytes (0-15) */

/sysdeps/powerpc/powerpc32/cell/

  memcpy.S
    54  cmplwi cr0,r8,0
    200  cmplwi cr0,r5,16
    213  cmplwi cr0,r5,0  /* copy remaining bytes (0-15) */

/sysdeps/powerpc/powerpc64/cell/

  memcpy.S
    58  cmpldi cr0,r8,0
    204  cmpldi cr0,r5,16
    217  cmpldi cr0,r5,0  /* copy remaining bytes (0-15) */

/sysdeps/powerpc/powerpc32/a2/

  memcpy.S  (alignment/length check pattern sketched after this section)
    49  cmplwi cr0,r8,0  /* Were we aligned on a 16 byte bdy? */
    158  cmpwi cr0,r9,128
    164  beq- cr0,L(big_lines)  /* 128 byte line code */
    307  cmplwi cr0,r5,16
    320  cmplwi cr0,r5,0  /* copy remaining bytes (0-15) */

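The a2 and cell memcpy variants above use cr0 for two recurring unsigned compares: whether the destination is already 16-byte aligned, and whether 16 or fewer bytes remain to copy. A minimal sketch of those checks in the 64-bit forms (the powerpc32 files use cmplwi and the 32-bit rotate instead); the register assignments and the labels are illustrative, not the files' actual code:

        /* Is dst (r3) already on a 16-byte boundary?  */
        clrldi  r8,r3,60              /* r8 = dst & 0xf */
        cmpldi  cr0,r8,0              /* zero iff already aligned */
        beq     cr0,L(dst_aligned)    /* skip the head-alignment copy */

        /* Later: does only a short tail (r5 = remaining length) remain?  */
        cmpldi  cr0,r5,16
        ble     cr0,L(copy_tail)      /* 16 bytes or fewer: short-copy tail */
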
/sysdeps/powerpc/

  sysdep.h  (macro definition; usage sketched after this section)
    102  #define cr0 0

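sysdeps/powerpc/sysdep.h defines cr0 (and, in the same style, the other CR field names) as a plain integer, so condition-register fields can be written symbolically in extended mnemonics and also used in arithmetic: each CR field is four bits wide, so an expression like cr1*4+eq names an absolute CR bit. A small sketch of both uses, assuming the companion lt/gt/eq/so bit macros from the same header; L(error) is an illustrative label:

        /* cr0 expands to 0, so these two forms assemble identically.  */
        cmpwi   cr0,r3,0
        cmpwi   0,r3,0

        /* CR bit arithmetic: bit index = field * 4 + bit-in-field
           (lt=0, gt=1, eq=2, so=3).  */
        cror    cr0*4+eq,cr1*4+eq,cr0*4+eq    /* cr0.EQ |= cr1.EQ (bits 2, 6, 2) */
        bt      cr0*4+so,L(error)             /* branch if cr0.SO (bit 3) is set */
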
/sysdeps/powerpc/powerpc64/

  mul_1.S
    43  beq cr0, L(b00)
    48  mulld cr0, r26, VL  (here cr0 expands to 0 and names general register r0, not a CR field)

/sysdeps/unix/sysv/linux/arm/

  getcontext.S
    78  mrc p10, 7, r1, cr1, cr0, 0  (ARM coprocessor-register operand, unrelated to the PowerPC cr0 macro; this mrc reads the VFP FPSCR)