/* bpf_jit.S : BPF JIT helper functions
 *
 * Copyright (C) 2011 Eric Dumazet (eric.dumazet@gmail.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; version 2
 * of the License.
 */
#include <linux/linkage.h>
#include <asm/frame.h>

/*
 * Calling convention :
 * rbx : skb pointer (callee saved)
 * esi : offset of byte(s) to fetch in skb (can be scratched)
 * r10 : copy of skb->data
 * r9d : hlen = skb->len - skb->data_len
 */
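
/*
 * For reference, a rough C sketch of the pattern every loader below follows.
 * This is illustrative only: "hlen" and "scratch" stand for the %r9d value
 * and the 32(%rbp) buffer set up by the JIT prologue, and
 * load_from_negative_offset() is a made-up name for the *_negative_offset
 * paths further down; none of these are symbols in this file.
 *
 *	u32 load_word(struct sk_buff *skb, int offset)
 *	{
 *		if (offset < 0)
 *			return load_from_negative_offset(skb, offset);
 *		if (hlen - offset >= 4)		// fast path: linear area
 *			return ntohl(*(u32 *)(skb->data + offset));
 *		if (skb_copy_bits(skb, offset, scratch, 4) < 0)
 *			goto bpf_error;		// offset past end of packet
 *		return ntohl(*(u32 *)scratch);
 *	}
 */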

#define SKBDATA         %r10
#define SKF_MAX_NEG_OFF $(-0x200000) /* SKF_LL_OFF from filter.h */

#define FUNC(name)                      \
        .globl name;                    \
        .type name, @function;          \
        name:
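
/*
 * These entry points are called from JIT-generated code using the ad-hoc
 * register convention above rather than the normal C calling convention,
 * which is presumably why they are declared as bare .globl/.type labels
 * instead of the usual ENTRY()/ENDPROC() annotations.
 */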

FUNC(sk_load_word)
        test    %esi,%esi
        js      bpf_slow_path_word_neg

FUNC(sk_load_word_positive_offset)
        mov     %r9d,%eax               # hlen
        sub     %esi,%eax               # hlen - offset
        cmp     $3,%eax
        jle     bpf_slow_path_word
        mov     (SKBDATA,%rsi),%eax
        bswap   %eax                    /* ntohl() */
        ret
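
/*
 * Note on the check above: %eax = hlen - offset is a 32-bit signed value,
 * so the single jle covers both "fewer than 4 bytes left in the linear
 * area" and "offset beyond hlen".  sk_load_half below uses the same
 * pattern with a $1 threshold (it needs 2 bytes).
 */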

FUNC(sk_load_half)
        test    %esi,%esi
        js      bpf_slow_path_half_neg

FUNC(sk_load_half_positive_offset)
        mov     %r9d,%eax
        sub     %esi,%eax               # hlen - offset
        cmp     $1,%eax
        jle     bpf_slow_path_half
        movzwl  (SKBDATA,%rsi),%eax
        rol     $8,%ax                  # ntohs()
        ret

FUNC(sk_load_byte)
        test    %esi,%esi
        js      bpf_slow_path_byte_neg

FUNC(sk_load_byte_positive_offset)
        cmp     %esi,%r9d               /* if (offset >= hlen) goto bpf_slow_path_byte */
        jle     bpf_slow_path_byte
        movzbl  (SKBDATA,%rsi),%eax
        ret
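
/*
 * Slow paths: the requested bytes are not entirely within the skb linear
 * area, so fall back to skb_copy_bits(), which also handles paged and
 * fragmented data.  The bytes land in the scratch slot the JIT prologue
 * reserves at 32(%rbp); a negative return value means the offset is out
 * of range.
 */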

/* rsi contains offset and can be scratched */
#define bpf_slow_path_common(LEN)               \
        lea     32(%rbp), %rdx;                 \
        FRAME_BEGIN;                            \
        mov     %rbx, %rdi; /* arg1 == skb */   \
        push    %r9;                            \
        push    SKBDATA;                        \
        /* rsi already has offset */            \
        mov     $LEN,%ecx;      /* len */       \
        call    skb_copy_bits;                  \
        test    %eax,%eax;                      \
        pop     SKBDATA;                        \
        pop     %r9;                            \
        FRAME_END

bpf_slow_path_word:
        bpf_slow_path_common(4)
        js      bpf_error
        mov     32(%rbp),%eax
        bswap   %eax
        ret

bpf_slow_path_half:
        bpf_slow_path_common(2)
        js      bpf_error
        mov     32(%rbp),%ax
        rol     $8,%ax
        movzwl  %ax,%eax
        ret

bpf_slow_path_byte:
        bpf_slow_path_common(1)
        js      bpf_error
        movzbl  32(%rbp),%eax
        ret
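
/*
 * Negative offsets address the ancillary SKF_LL_OFF / SKF_NET_OFF ranges.
 * Offsets below SKF_MAX_NEG_OFF are rejected outright; the rest are
 * resolved by bpf_internal_load_pointer_neg_helper(), which returns a
 * pointer to the requested bytes or NULL on failure (hence the jz
 * bpf_error at the end of sk_negative_common).
 */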

#define sk_negative_common(SIZE)                        \
        FRAME_BEGIN;                                    \
        mov     %rbx, %rdi; /* arg1 == skb */           \
        push    %r9;                                    \
        push    SKBDATA;                                \
        /* rsi already has offset */                    \
        mov     $SIZE,%edx;     /* size */              \
        call    bpf_internal_load_pointer_neg_helper;   \
        test    %rax,%rax;                              \
        pop     SKBDATA;                                \
        pop     %r9;                                    \
        FRAME_END;                                      \
        jz      bpf_error

bpf_slow_path_word_neg:
        cmp     SKF_MAX_NEG_OFF, %esi   /* test range */
        jl      bpf_error               /* offset lower -> error */

FUNC(sk_load_word_negative_offset)
        sk_negative_common(4)
        mov     (%rax), %eax
        bswap   %eax
        ret

bpf_slow_path_half_neg:
        cmp     SKF_MAX_NEG_OFF, %esi
        jl      bpf_error

FUNC(sk_load_half_negative_offset)
        sk_negative_common(2)
        mov     (%rax),%ax
        rol     $8,%ax
        movzwl  %ax,%eax
        ret

bpf_slow_path_byte_neg:
        cmp     SKF_MAX_NEG_OFF, %esi
        jl      bpf_error

FUNC(sk_load_byte_negative_offset)
        sk_negative_common(1)
        movzbl  (%rax), %eax
        ret
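
/*
 * Common error exit: make the whole JITed program (not just this helper)
 * return 0.  The register restores and the $40 adjustment of %rbp mirror
 * the frame that the JIT prologue in bpf_jit_comp.c sets up, so the
 * leaveq/ret below unwind the JITed frame and return to its caller.
 */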

bpf_error:
        # force a return 0 from jit handler
        xor     %eax,%eax
        mov     (%rbp),%rbx
        mov     8(%rbp),%r13
        mov     16(%rbp),%r14
        mov     24(%rbp),%r15
        add     $40, %rbp
        leaveq
        ret