/* NOTE(review): extraction residue -- two runs of concatenated line
   numbers from the original listing; no code content. */
/*
 * NOTE(review): this span is residue of the access-wrapper macros whose
 * opening lines (.macro EXC ... and the prefetch wrapper header) are
 * missing from this view, followed by the file-level directives.
 * Code kept byte-identical; comments only.  Do not assemble as-is.
 */
- .if \mode == LEGACY_MODE
/* Legacy mode: plain instruction plus an __ex_table fixup entry
   mapping the access (label 9) to its fault handler. */
- 9: insn reg, addr; \
- .section __ex_table,"a"
- PTR 9b, handler
- .previous
- \
- .else
- \
/* EVA mode: use the EVA (*e) instruction form only for the user-space
   side of the copy; kernel-side accesses stay ordinary. */
- .if ((\from == USEROP) && (type == LD_INSN)) || \
- ((\to == USEROP) && (type == ST_INSN))
- 9: __BUILD_EVA_INSN(insn##e, reg, addr); \
- .section __ex_table,"a"
- PTR 9b, handler
- .previous
- .else
-
- \
/* Kernel-to-kernel access: ordinary instruction, no fixup entry here. */
- insn reg, addr; \
- .endif
- .endif
/* Prefetch wrapper fragment: PREFE for user addresses in EVA mode
   (with $at mapped to v1 around it), plain PREF otherwise. */
- .if \mode == LEGACY_MODE
- PREF(hint, addr); \
- .else
- .if ((\from == USEROP) && (type == SRC_PREFETCH)) || \
- ((\to == USEROP) && (type == DST_PREFETCH))
-
- \
- .set at=v1
- PREFE(hint, addr); \
- .set noat
- .else
- PREF(hint, addr); \
- .endif
- .endif
/* File-level assembler state: delay slots are scheduled by hand
   (noreorder); $at is reserved and v1 stands in for it. */
- .text
- .set noreorder
- .set noat
- .set at=v1
- .align 5
-
/*
 * __BUILD_COPY_USER mode, from, to -- emits one complete memcpy /
 * copy_user body: an 8-unit unrolled aligned loop, a 4-unit cleanup
 * loop, unaligned src/dst fix-up paths, a byte-copy tail, and the fault
 * handlers that leave the count of NOT-copied bytes in len.
 *
 * "\@" is the assembler's per-expansion counter, so every expansion of
 * this macro gets its own private copies of the .L* labels.
 * ".set noreorder" is in effect (set earlier in the file): the
 * instruction after each branch/jump is its delay slot and always runs.
 *
 * NOTE(review): each line below starts with "- " and several
 * preprocessor lines (#define COPY_BYTE / SEXC, CONFIG_* #ifdef arms)
 * appear stripped -- extraction residue.  Code kept byte-identical.
 * src/dst/len/rem/bits/match, NBYTES, ADDRMASK, UNIT(), FIRST()/REST()
 * are presumably #defines from the missing file header -- TODO confirm.
 */
- .macro __BUILD_COPY_USER mode, from, to
-
/* First expansion only: define __memcpy so END(memcpy) below is emitted once. */
- .ifnotdef __memcpy
- .set __memcpy, 1
- .hidden __memcpy
- .endif
-
- R10KCBARRIER(0(ra))
-
/* Entry: len < NBYTES goes straight to the byte loop; otherwise dispatch
   on src/dst misalignment (t0/t1).  Prefetches warm both streams. */
- PREFS( 0, 0(src) )
- PREFD( 1, 0(dst) )
- sltu t2, len, NBYTES # t2 = (len < one copy unit)
- and t1, dst, ADDRMASK # t1 = dst misalignment
- PREFS( 0, 1*32(src) )
- PREFD( 1, 1*32(dst) )
- bnez t2, .Lcopy_bytes_checklen\@
- and t0, src, ADDRMASK # delay slot: t0 = src misalignment
- PREFS( 0, 2*32(src) )
- PREFD( 1, 2*32(dst) )
- bnez t1, .Ldst_unaligned\@
- nop
- bnez t0, .Lsrc_unaligned_dst_aligned\@
/* NOTE(review): the next two lines look like the other arm of a stripped
   #ifdef (CPUs without load/store-left/right take the pure byte path);
   the selecting preprocessor lines are missing here. */
- or t0, t0, t1
- bnez t0, .Lcopy_unaligned_bytes\@
-
- .Lboth_aligned\@:
/* Both aligned: main loop moves 8*NBYTES per iteration; rem = leftover. */
- SRL t0, len, LOG_NBYTES+3 # t0 = number of 8-unit iterations
- beqz t0, .Lcleanup_both_aligned\@
- and rem, len, (8*NBYTES-1) # delay slot: rem = len % (8*NBYTES)
- PREFS( 0, 3*32(src) )
- PREFD( 1, 3*32(dst) )
- .align 4
- 1:
- R10KCBARRIER(0(ra))
/* Each LOAD/STORE names the handler to run if the access faults. */
- LOAD(t0, UNIT(0)(src), .Ll_exc\@)
- LOAD(t1, UNIT(1)(src), .Ll_exc_copy\@)
- LOAD(t2, UNIT(2)(src), .Ll_exc_copy\@)
- LOAD(t3, UNIT(3)(src), .Ll_exc_copy\@)
- SUB len, len, 8*NBYTES
- LOAD(t4, UNIT(4)(src), .Ll_exc_copy\@)
- LOAD(t7, UNIT(5)(src), .Ll_exc_copy\@)
- STORE(t0, UNIT(0)(dst), .Ls_exc_p8u\@)
- STORE(t1, UNIT(1)(dst), .Ls_exc_p7u\@)
- LOAD(t0, UNIT(6)(src), .Ll_exc_copy\@)
- LOAD(t1, UNIT(7)(src), .Ll_exc_copy\@)
- ADD src, src, 8*NBYTES
- ADD dst, dst, 8*NBYTES
/* Pointers already advanced, so the remaining stores use negative units. */
- STORE(t2, UNIT(-6)(dst), .Ls_exc_p6u\@)
- STORE(t3, UNIT(-5)(dst), .Ls_exc_p5u\@)
- STORE(t4, UNIT(-4)(dst), .Ls_exc_p4u\@)
- STORE(t7, UNIT(-3)(dst), .Ls_exc_p3u\@)
- STORE(t0, UNIT(-2)(dst), .Ls_exc_p2u\@)
- STORE(t1, UNIT(-1)(dst), .Ls_exc_p1u\@)
- PREFS( 0, 8*32(src) )
- PREFD( 1, 8*32(dst) )
- bne len, rem, 1b
- nop
-
- .Lcleanup_both_aligned\@:
/* 0 <= len < 8*NBYTES left; take one 4-unit chunk if it fits. */
- beqz len, .Ldone\@
- sltu t0, len, 4*NBYTES
- bnez t0, .Lless_than_4units\@
- and rem, len, (NBYTES-1) # delay slot: rem = len % NBYTES
-
- LOAD( t0, UNIT(0)(src), .Ll_exc\@)
- LOAD( t1, UNIT(1)(src), .Ll_exc_copy\@)
- LOAD( t2, UNIT(2)(src), .Ll_exc_copy\@)
- LOAD( t3, UNIT(3)(src), .Ll_exc_copy\@)
- SUB len, len, 4*NBYTES
- ADD src, src, 4*NBYTES
- R10KCBARRIER(0(ra))
- STORE(t0, UNIT(0)(dst), .Ls_exc_p4u\@)
- STORE(t1, UNIT(1)(dst), .Ls_exc_p3u\@)
- STORE(t2, UNIT(2)(dst), .Ls_exc_p2u\@)
- STORE(t3, UNIT(3)(dst), .Ls_exc_p1u\@)
/* .set reorder lets the assembler schedule the delay slot of the branch. */
- .set reorder
- ADD dst, dst, 4*NBYTES
- beqz len, .Ldone\@
- .set noreorder
- .Lless_than_4units\@:
-
/* Copy one unit at a time until only rem (< NBYTES) bytes remain. */
- beq rem, len, .Lcopy_bytes\@
- nop
- 1:
- R10KCBARRIER(0(ra))
- LOAD(t0, 0(src), .Ll_exc\@)
- ADD src, src, NBYTES
- SUB len, len, NBYTES
- STORE(t0, 0(dst), .Ls_exc_p1u\@)
- .set reorder
- ADD dst, dst, NBYTES
- bne rem, len, 1b
- .set noreorder
-
/* Tail of 1..NBYTES-1 bytes: load one full unit, discard the bits we
   must not store, then store the remainder with a partial store ending
   at dst + len - 1 (t1). */
- beqz len, .Ldone\@
- ADD t1, dst, len # delay slot: t1 = one past last dst byte
- li bits, 8*NBYTES
- SLL rem, len, 3 # rem = bits actually wanted
- LOAD(t0, 0(src), .Ll_exc\@)
- SUB bits, bits, rem # bits to discard
- SHIFT_DISCARD t0, t0, bits
- STREST(t0, -1(t1), .Ls_exc\@)
- jr ra
- move len, zero # delay slot: 0 bytes left uncopied
- .Ldst_unaligned\@:
-
/* dst misaligned (t1 = misalignment): copy the t2 = NBYTES - t1 bytes
   that align dst, via an unaligned load pair + partial store.
   match = t0 ^ t1 is zero iff src and dst had identical misalignment,
   in which case both are now aligned -> .Lboth_aligned. */
- LDFIRST(t3, FIRST(0)(src), .Ll_exc\@)
- ADD t2, zero, NBYTES
- LDREST(t3, REST(0)(src), .Ll_exc_copy\@)
- SUB t2, t2, t1 # t2 = bytes until dst is aligned
- xor match, t0, t1
- R10KCBARRIER(0(ra))
- STFIRST(t3, FIRST(0)(dst), .Ls_exc\@)
- beq len, t2, .Ldone\@
- SUB len, len, t2 # delay slot
- ADD dst, dst, t2
- beqz match, .Lboth_aligned\@
- ADD src, src, t2 # delay slot
- .Lsrc_unaligned_dst_aligned\@:
/* src misaligned, dst aligned: 4 units per iteration via LDFIRST/LDREST
   unaligned load pairs, then 4 aligned stores. */
- SRL t0, len, LOG_NBYTES+2 # t0 = number of 4-unit iterations
- PREFS( 0, 3*32(src) )
- beqz t0, .Lcleanup_src_unaligned\@
- and rem, len, (4*NBYTES-1) # delay slot
- PREFD( 1, 3*32(dst) )
- 1:
- R10KCBARRIER(0(ra))
- LDFIRST(t0, FIRST(0)(src), .Ll_exc\@)
- LDFIRST(t1, FIRST(1)(src), .Ll_exc_copy\@)
- SUB len, len, 4*NBYTES
- LDREST(t0, REST(0)(src), .Ll_exc_copy\@)
- LDREST(t1, REST(1)(src), .Ll_exc_copy\@)
- LDFIRST(t2, FIRST(2)(src), .Ll_exc_copy\@)
- LDFIRST(t3, FIRST(3)(src), .Ll_exc_copy\@)
- LDREST(t2, REST(2)(src), .Ll_exc_copy\@)
- LDREST(t3, REST(3)(src), .Ll_exc_copy\@)
- PREFS( 0, 9*32(src) )
- ADD src, src, 4*NBYTES
- nop
- STORE(t0, UNIT(0)(dst), .Ls_exc_p4u\@)
- STORE(t1, UNIT(1)(dst), .Ls_exc_p3u\@)
- STORE(t2, UNIT(2)(dst), .Ls_exc_p2u\@)
- STORE(t3, UNIT(3)(dst), .Ls_exc_p1u\@)
- PREFD( 1, 9*32(dst) )
- .set reorder
- ADD dst, dst, 4*NBYTES
- bne len, rem, 1b
- .set noreorder
- .Lcleanup_src_unaligned\@:
/* One unit at a time (unaligned loads) until < NBYTES bytes remain. */
- beqz len, .Ldone\@
- and rem, len, NBYTES-1 # delay slot
- beq rem, len, .Lcopy_bytes\@
- nop
- 1:
- R10KCBARRIER(0(ra))
- LDFIRST(t0, FIRST(0)(src), .Ll_exc\@)
- LDREST(t0, REST(0)(src), .Ll_exc_copy\@)
- ADD src, src, NBYTES
- SUB len, len, NBYTES
- STORE(t0, 0(dst), .Ls_exc_p1u\@)
- .set reorder
- ADD dst, dst, NBYTES
- bne len, rem, 1b
- .set noreorder
- .Lcopy_bytes_checklen\@:
- beqz len, .Ldone\@
- nop
- .Lcopy_bytes\@:
-
/* NOTE(review): the next five lines look like the body of a stripped
   "#define COPY_BYTE(N)" (note the trailing "; \"): copy byte N and
   exit via .Ldone when len reaches 0. */
- R10KCBARRIER(0(ra))
- LOADB(t0, N(src), .Ll_exc\@); \
- SUB len, len, 1
- beqz len, .Ldone\@
- STOREB(t0, N(dst), .Ls_exc_p1\@)
- COPY_BYTE(0)
- COPY_BYTE(1)
- COPY_BYTE(2)
- COPY_BYTE(3)
- COPY_BYTE(4)
- COPY_BYTE(5)
/* Final byte copied inline so its store sits in the jr delay slot. */
- LOADB(t0, NBYTES-2(src), .Ll_exc\@)
- SUB len, len, 1
- jr ra
- STOREB(t0, NBYTES-2(dst), .Ls_exc_p1\@)
- .Ldone\@:
- jr ra
- nop
- .Lcopy_unaligned_bytes\@:
/* Pure byte loop (no load/store-left/right): 8 bytes per pass; the
   beqz inside each COPY_BYTE exits through .Ldone when len reaches 0. */
- 1:
- COPY_BYTE(0)
- COPY_BYTE(1)
- COPY_BYTE(2)
- COPY_BYTE(3)
- COPY_BYTE(4)
- COPY_BYTE(5)
- COPY_BYTE(6)
- COPY_BYTE(7)
- ADD src, src, 8
- b 1b
- ADD dst, dst, 8 # delay slot
/* Close the memcpy symbol in the first expansion only (see top). */
- .if __memcpy == 1
- END(memcpy)
- .set __memcpy, 0
- .hidden __memcpy
- .endif
- .Ll_exc_copy\@:
-
/* Load fault after some data was already in registers: re-copy bytes
   [src, faulting address) one at a time, then fall into .Ll_exc.
   THREAD_BUADDR(t0) is the first un-readable address, fetched via the
   current task ($28 = thread pointer). */
- LOADK t0, TI_TASK($28)
- nop
- LOADK t0, THREAD_BUADDR(t0)
- 1:
- LOADB(t1, 0(src), .Ll_exc\@)
- ADD src, src, 1
- sb t1, 0(dst)
- .set reorder
- ADD dst, dst, 1
- bne src, t0, 1b
- .set noreorder
- .Ll_exc\@:
/* Load fault: compute the number of bytes NOT copied into len.
   NOTE(review): AT appears to hold src + len here, set up by fixup code
   outside this view -- confirm against the original. */
- LOADK t0, TI_TASK($28)
- nop
- LOADK t0, THREAD_BUADDR(t0) # t0 = first bad source address
- nop
- SUB len, AT, t0 # len = uncopied byte count
- bnez t6, .Ldone\@ # t6 != 0: "inatomic" caller, skip zero-fill
-
/* Advance dst by the bytes successfully copied: dst += (t0 - src). */
- ADD dst, t0
- SUB dst, src
-
/* Zero-fill the uncopied tail of dst.
   NOTE(review): both the plain loop tail and the DADDI-workaround tail
   (the li v1, 1 variant under noat) appear back to back -- the
   selecting #ifdef lines look stripped; as written both assemble. */
- .set reorder
- SUB src, len, 1 # src reused as down-counter
- beqz len, .Ldone\@
- .set noreorder
- 1: sb zero, 0(dst)
- ADD dst, dst, 1
- bnez src, 1b
- SUB src, src, 1 # delay slot
- .set push
- .set noat
- li v1, 1
- bnez src, 1b
- SUB src, src, v1 # delay slot (avoids DADDI immediate form)
- .set pop
- jr ra
- nop
- .set reorder
/* NOTE(review): ".Ls_exc_p" below is truncated -- these lines look like
   the body of a stripped "#define SEXC(n)" that builds handler
   .Ls_exc_pNu: add the n*NBYTES un-stored bytes back to len, return. */
- .Ls_exc_p
- ADD len, len, n*NBYTES
- jr ra
- .set noreorder
/* Instantiate the store-fault handlers for 8..1 pending units. */
- SEXC(8)
- SEXC(7)
- SEXC(6)
- SEXC(5)
- SEXC(4)
- SEXC(3)
- SEXC(2)
- SEXC(1)
- .Ls_exc_p1\@:
/* Store fault with exactly one byte pending. */
- .set reorder
- ADD len, len, 1
- jr ra
- .set noreorder
- .Ls_exc\@:
/* Store fault with len already correct: just return. */
- jr ra
- nop
- .endm
/*
 * memmove(a0 = dst, a1 = src, a2 = len) -> v0 = dst.
 *
 * Overlap test: t0 = (src < dst + len) && (dst < src + len).  If the
 * regions do not overlap, branch to the plain forward copy (.L__memcpy);
 * v0 = a0 is set in the delay slot either way (return value).  If
 * len == 0 skip to .Lr_out; otherwise execution falls through
 * END(memmove) into __rmemcpy below, which copies in a safe direction.
 */
- .align 5
- LEAF(memmove)
- ADD t0, a0, a2 # t0 = dst + len
- ADD t1, a1, a2 # t1 = src + len
- sltu t0, a1, t0 # t0 = (src < dst + len)
- sltu t1, a0, t1 # t1 = (dst < src + len)
- and t0, t1 # overlap iff both true
- beqz t0, .L__memcpy # no overlap: use the fast memcpy body
- move v0, a0 # delay slot: return value = dst
- beqz a2, .Lr_out # len == 0: nothing to copy
- END(memmove)
-
/*
 * __rmemcpy: byte-at-a-time copy used by memmove for overlapping
 * buffers.  If src (a1) < dst (a0), copy backwards from the end
 * (.Lr_end_bytes); otherwise copy forwards (.Lr_end_bytes_up).
 * Returns with a2 = 0.
 */
- LEAF(__rmemcpy)
- sltu t0, a1, a0 # t0 = (src < dst)
- beqz t0, .Lr_end_bytes_up # src >= dst: forward copy is safe
- nop
/* Backwards copy: point a0/a1 one past the end, then walk down. */
- ADD a0, a2
- ADD a1, a2
- .Lr_end_bytes:
- R10KCBARRIER(0(ra))
- lb t0, -1(a1)
- SUB a2, a2, 0x1
- sb t0, -1(a0)
- SUB a1, a1, 0x1
/* .set reorder lets the assembler fill the branch delay slot here. */
- .set reorder
- SUB a0, a0, 0x1
- bnez a2, .Lr_end_bytes
- .set noreorder
- .Lr_out:
- jr ra
- move a2, zero # delay slot: report all bytes handled
- .Lr_end_bytes_up:
/* Forward byte copy (non-overlap-hazardous direction). */
- R10KCBARRIER(0(ra))
- lb t0, (a1)
- SUB a2, a2, 0x1
- sb t0, (a0)
- ADD a1, a1, 0x1
- .set reorder
- ADD a0, a0, 0x1
- bnez a2, .Lr_end_bytes_up
- .set noreorder
- jr ra
- move a2, zero # delay slot
- END(__rmemcpy)
/*
 * __copy_user_inatomic: identical to __copy_user except t6 = 1, which
 * makes the load-fault path (.Ll_exc in __BUILD_COPY_USER) skip
 * zero-filling the uncopied tail of the destination.
 */
- LEAF(__copy_user_inatomic)
- b __copy_user_common
- li t6, 1 # delay slot: flag = no zero-fill on fault
- END(__copy_user_inatomic)
/*
 * memcpy(a0 = dst, a1 = src, a2 = len) -> v0 = dst.
 *
 * __copy_user shares this body (FEXPORT) with t6 = 0, so a fault
 * zero-fills the rest of dst and reports the uncopied byte count.
 * The body itself is the LEGACY_MODE expansion of __BUILD_COPY_USER;
 * END(memcpy) is emitted inside that macro (first expansion only).
 */
- .align 5
- LEAF(memcpy)
- move v0, dst # return value = dst
- .L__memcpy:
- FEXPORT(__copy_user)
- li t6, 0 # zero-fill uncopied dst on fault
- __copy_user_common:
-
- __BUILD_COPY_USER LEGACY_MODE USEROP USEROP
/*
 * __copy_user_inatomic_eva: EVA from-user copy with t6 = 1 so the
 * fault path skips zero-filling the destination tail (see .Ll_exc).
 */
- LEAF(__copy_user_inatomic_eva)
- b __copy_from_user_common
- li t6, 1 # delay slot: flag = no zero-fill on fault
- END(__copy_user_inatomic_eva)
/*
 * __copy_from_user_eva: EVA-mode copy, user source -> kernel
 * destination.  t6 = 0: zero-fill the uncopied tail on fault.
 */
- LEAF(__copy_from_user_eva)
- li t6, 0
- __copy_from_user_common:
- __BUILD_COPY_USER EVA_MODE USEROP KERNELOP
- END(__copy_from_user_eva)
/*
 * __copy_to_user_eva: EVA-mode copy, kernel source -> user destination.
 * NOTE(review): t6 is not written here (unlike the from-user entries);
 * presumably the store-fault handlers never consult it -- confirm.
 */
- LEAF(__copy_to_user_eva)
- __BUILD_COPY_USER EVA_MODE KERNELOP USEROP
- END(__copy_to_user_eva)
/*
 * __copy_in_user_eva: EVA-mode copy with both source and destination
 * in user space (EVA access forms used on both sides).
 */
- LEAF(__copy_in_user_eva)
- __BUILD_COPY_USER EVA_MODE USEROP USEROP
- END(__copy_in_user_eva)
/* NOTE(review): stray '|' -- extraction residue at end of chunk. */