- /* checksum.S: M32R one's-complement checksum routine (csum_partial) */
- #include <linux/linkage.h>
- #include <asm/assembler.h>
- #include <asm/errno.h>
- #ifdef CONFIG_ISA_DUAL_ISSUE
-
- .text
- ENTRY(csum_partial)
- ; unsigned int csum_partial(const void *buff, int len, unsigned int sum)
- ; In:   r0 = buff, r1 = len (bytes), r2 = initial sum (saved on the stack)
- ; Out:  r0 = initial sum combined with the 16-bit folded ones-complement
- ;       sum of the buffer
- ; Uses: r3-r6 scratch, r7 = odd-alignment flag, r14 = return address
- ; "||" pairs two instructions for dual issue (CONFIG_ISA_DUAL_ISSUE);
- ; "cmp r0, r0" is used throughout to clear the condition (carry) bit
- ; before starting an addx (add-with-carry) chain.
-
-
-
-
- push r2 || ldi r2, #0 ; save caller's sum; accumulator r2 = 0
- and3 r7, r0, #1 ; Check alignment.
- beqz r7, 1f ; skip if buff is at least 2-byte aligned
- ; Odd start address: consume one byte.  r7 stays non-zero, so the final
- ; 16-bit result is byte-swapped before returning (see label 7 below).
- ; NOTE(review): the byte is added unshifted here and the final swap is
- ; relied on to put it in the right lane - confirm endian handling.
- ldub r4, @r0 || addi r0, #1
-
- cmp r0, r0 || addi r1, #-1 ; clear carry; len--
- ldi r3, #0 || addx r2, r4 ; sum += byte
- addx r2, r3 ; absorb final carry (r3 = 0)
- .fillinsn
- 1:
- and3 r4, r0, #2 ; Check alignment.
- beqz r4, 2f ; already 4-byte aligned
-
- cmp r0, r0 || addi r1, #-2 ; clear carry; len -= 2
- bgtz r1, 1f ; enough bytes left past the halfword?
- bra 4f || addi r1, #2 ; 2 or fewer left: restore len, do tail
- .fillinsn
- 1:
- ; Align to a 4-byte boundary by adding one halfword.
- lduh r4, @r0 || ldi r3, #0
- addx r2, r4 || addi r0, #2 ; sum += halfword
- addx r2, r3 ; absorb final carry (r3 = 0)
- .fillinsn
- 2:
- ; Main loop: 32 bytes (8 words) per iteration; r6 = len / 32.
- cmp r0, r0 ; clear carry for the addx chain
- srl3 r6, r1, #5
- beqz r6, 2f
- .fillinsn
- 1: ld r3, @r0+
- ld r4, @r0+
- ld r5, @r0+
- ld r3, @r0+ || addx r2, r3 ; loads overlap the adds (dual issue)
- ld r4, @r0+ || addx r2, r4
- ld r5, @r0+ || addx r2, r5
- ld r3, @r0+ || addx r2, r3
- ld r4, @r0+ || addx r2, r4
- addx r2, r5 || addi r6, #-1
- addx r2, r3
- addx r2, r4
- bnez r6, 1b ; carry is carried across iterations
- addx r2, r6 ; absorb final carry (r6 = 0 here)
- cmp r0, r0 ; clear carry
- .fillinsn
- 2: and3 r6, r1, #0x1c ; withdraw len
- beqz r6, 4f
- srli r6, #2 ; r6 = remaining whole words
- .fillinsn
- 3: ld r4, @r0+ || addi r6, #-1
- addx r2, r4
- bnez r6, 3b
- addx r2, r6 ; absorb final carry (r6 = 0 here)
- cmp r0, r0 ; clear carry
- .fillinsn
- 4: and3 r1, r1, #3 ; 0-3 tail bytes remain
- beqz r1, 7f
- and3 r6, r1, #2
- beqz r6, 5f
- ; Tail halfword, added in the high 16 bits (folded below).
- lduh r4, @r0 || addi r0, #2
- addi r1, #-2 || slli r4, #16
- addx r2, r4
- beqz r1, 6f
- .fillinsn
- 5: ldub r4, @r0 || ldi r1, #0 ; last odd byte
- #ifndef __LITTLE_ENDIAN__
- slli r4, #8 ; big-endian: byte goes in the high lane
- #endif
- addx r2, r4
- .fillinsn
- 6: addx r2, r1 ; absorb final carry (r1 = 0)
- .fillinsn
- 7:
- ; Fold the 32-bit sum into 16 bits with end-around carry.
- and3 r0, r2, #0xffff
- srli r2, #16
- add r0, r2
- srl3 r2, r0, #16
- beqz r2, 1f
- addi r0, #1 ; end-around carry
- and3 r0, r0, #0xffff
- .fillinsn
- 1:
- beqz r7, 1f ; started on an odd address?
- ; Yes: swap the two bytes of the 16-bit result to compensate.
- and3 r2, r0, #0xff
- srl3 r0, r0, #8
- slli r2, #8
- or r0, r2
- .fillinsn
- 1:
- pop r2 || cmp r0, r0 ; restore caller's sum; clear carry
- addx r0, r2 || ldi r2, #0 ; result += initial sum
- addx r0, r2 ; absorb final carry
- jmp r14 ; return (r14 = link register)
- #else /* not CONFIG_ISA_DUAL_ISSUE */
-
- .text
- ENTRY(csum_partial)
- ; unsigned int csum_partial(const void *buff, int len, unsigned int sum)
- ; Serial (non-dual-issue) variant: same algorithm as the dual-issue
- ; version, one instruction per line.
- ; In:   r0 = buff, r1 = len (bytes), r2 = initial sum (saved on the stack)
- ; Out:  r0 = initial sum combined with the 16-bit folded ones-complement
- ;       sum of the buffer
- ; Uses: r3-r6 scratch, r7 = odd-alignment flag, r14 = return address
- ; "cmp r0, r0" clears the condition (carry) bit before an addx chain.
-
-
-
-
- push r2 ; save caller's sum
- ldi r2, #0 ; accumulator r2 = 0
- and3 r7, r0, #1 ; Check alignment.
- beqz r7, 1f ; skip if buff is at least 2-byte aligned
- ; Odd start address: consume one byte.  r7 stays non-zero, so the final
- ; 16-bit result is byte-swapped before returning (see label 7 below).
- ldub r4, @r0
- addi r0, #1
- addi r1, #-1 ; Alignment uses up bytes.
- cmp r0, r0 ; clear carry
- ldi r3, #0
- addx r2, r4 ; sum += byte
- addx r2, r3 ; absorb final carry (r3 = 0)
- .fillinsn
- 1:
- and3 r4, r0, #2 ; Check alignment.
- beqz r4, 2f ; already 4-byte aligned
- addi r1, #-2 ; Alignment uses up two bytes.
- cmp r0, r0 ; clear carry
- bgtz r1, 1f ; enough bytes left past the halfword?
- addi r1, #2 ; len(r1) was < 2. Deal with it.
- bra 4f ; handle remainder as tail bytes
- .fillinsn
- 1:
- ; Align to a 4-byte boundary by adding one halfword.
- lduh r4, @r0
- addi r0, #2
- ldi r3, #0
- addx r2, r4 ; sum += halfword
- addx r2, r3 ; absorb final carry (r3 = 0)
- .fillinsn
- 2:
- ; Main loop: 32 bytes (8 words) per iteration; r6 = len / 32.
- cmp r0, r0 ; clear carry for the addx chain
- srl3 r6, r1, #5
- beqz r6, 2f
- .fillinsn
- 1: ld r3, @r0+
- ld r4, @r0+
- ld r5, @r0+
- addx r2, r3
- addx r2, r4
- addx r2, r5
- ld r3, @r0+
- ld r4, @r0+
- ld r5, @r0+
- addx r2, r3
- addx r2, r4
- addx r2, r5
- ld r3, @r0+
- ld r4, @r0+
- addi r6, #-1
- addx r2, r3
- addx r2, r4
- bnez r6, 1b ; carry is carried across iterations
- addx r2, r6 ; absorb final carry (r6 = 0 here)
- cmp r0, r0 ; clear carry
- .fillinsn
- 2: and3 r6, r1, #0x1c ; withdraw len
- beqz r6, 4f
- srli r6, #2 ; r6 = remaining whole words
- .fillinsn
- 3: ld r4, @r0+
- addi r6, #-1
- addx r2, r4
- bnez r6, 3b
- addx r2, r6 ; absorb final carry (r6 = 0 here)
- cmp r0, r0 ; clear carry
- .fillinsn
- 4: and3 r1, r1, #3 ; 0-3 tail bytes remain
- beqz r1, 7f
- and3 r6, r1, #2
- beqz r6, 5f
- ; Tail halfword, added in the high 16 bits (folded below).
- lduh r4, @r0
- addi r0, #2
- addi r1, #-2
- slli r4, #16
- addx r2, r4
- beqz r1, 6f
- .fillinsn
- 5: ldub r4, @r0 ; last odd byte
- #ifndef __LITTLE_ENDIAN__
- slli r4, #8 ; big-endian: byte goes in the high lane
- #endif
- addx r2, r4
- .fillinsn
- 6: ldi r5, #0
- addx r2, r5 ; absorb final carry (r5 = 0)
- .fillinsn
- 7:
- ; Fold the 32-bit sum into 16 bits with end-around carry.
- and3 r0, r2, #0xffff
- srli r2, #16
- add r0, r2
- srl3 r2, r0, #16
- beqz r2, 1f
- addi r0, #1 ; end-around carry
- and3 r0, r0, #0xffff
- .fillinsn
- 1:
- beqz r7, 1f ; started on an odd address?
- ; Yes: swap the two bytes of the 16-bit result to compensate.
- mv r2, r0
- srl3 r0, r2, #8
- and3 r2, r2, #0xff
- slli r2, #8
- or r0, r2
- .fillinsn
- 1:
- pop r2 ; restore caller's sum
- cmp r0, r0 ; clear carry
- addx r0, r2 ; result += initial sum
- ldi r2, #0
- addx r0, r2 ; absorb final carry
- jmp r14 ; return (r14 = link register)
- #endif /* not CONFIG_ISA_DUAL_ISSUE */
- ENTRY(csum_partial_copy_generic)
- ; NOTE(review): stub only - performs no copy and no checksum; it just
- ; returns via r14 (padded with nops).  Presumably the real implementation
- ; lives elsewhere (e.g. in C) - confirm against the rest of the port.
- nop
- nop
- nop
- nop
- jmp r14 ; return immediately
- nop
- nop
- nop
- .end
|