/* SPDX-License-Identifier: GPL-2.0 */
/* copy_user.S: Sparc optimized copy_from_user and copy_to_user code.
 *
 * Copyright(C) 1995 Linus Torvalds
 * Copyright(C) 1996 David S. Miller
 * Copyright(C) 1996 Eddie C. Dost
 * Copyright(C) 1996,1998 Jakub Jelinek
 *
 * derived from:
 *	e-mail between David and Eddie.
 *
 * Returns 0 if successful, otherwise count of bytes not copied yet
 */
#include <asm/ptrace.h>
#include <asm/asmmacro.h>
#include <asm/page.h>
#include <asm/thread_info.h>
#include <asm/export.h>

/* Work around cpp -rob */
#define ALLOC #alloc
#define EXECINSTR #execinstr
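
/* EX() and EX2() wrap a single user-access instruction (local label 98)
 * and give it an __ex_table entry whose fixup stub (local label 99)
 * computes the number of bytes still to be copied into %g3 and branches
 * to fixupretl.  EXO2() instead points its __ex_table entry at the
 * common fixup at label 97, and EXT() registers a whole range of
 * instructions together with a handler label for the unrolled loops
 * below.
 */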
#define EX(x,y,a,b) \
98:	x,y; \
	.section .fixup,ALLOC,EXECINSTR; \
	.align	4; \
99:	ba fixupretl; \
	a, b, %g3; \
	.section __ex_table,ALLOC; \
	.align	4; \
	.word	98b, 99b; \
	.text; \
	.align	4

#define EX2(x,y,c,d,e,a,b) \
98:	x,y; \
	.section .fixup,ALLOC,EXECINSTR; \
	.align	4; \
99:	c, d, e; \
	ba fixupretl; \
	a, b, %g3; \
	.section __ex_table,ALLOC; \
	.align	4; \
	.word	98b, 99b; \
	.text; \
	.align	4

#define EXO2(x,y) \
98:	x, y; \
	.section __ex_table,ALLOC; \
	.align	4; \
	.word	98b, 97f; \
	.text; \
	.align	4

#define EXT(start,end,handler) \
	.section __ex_table,ALLOC; \
	.align	4; \
	.word	start, 0, end, handler; \
	.text; \
	.align	4
/* Please do not change following macros unless you change logic used
 * in .fixup at the end of this file as well
 */

/* Both these macros have to start with exactly the same insn */
#define MOVE_BIGCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
	ldd	[%src + (offset) + 0x00], %t0; \
	ldd	[%src + (offset) + 0x08], %t2; \
	ldd	[%src + (offset) + 0x10], %t4; \
	ldd	[%src + (offset) + 0x18], %t6; \
	st	%t0, [%dst + (offset) + 0x00]; \
	st	%t1, [%dst + (offset) + 0x04]; \
	st	%t2, [%dst + (offset) + 0x08]; \
	st	%t3, [%dst + (offset) + 0x0c]; \
	st	%t4, [%dst + (offset) + 0x10]; \
	st	%t5, [%dst + (offset) + 0x14]; \
	st	%t6, [%dst + (offset) + 0x18]; \
	st	%t7, [%dst + (offset) + 0x1c];

#define MOVE_BIGALIGNCHUNK(src, dst, offset, t0, t1, t2, t3, t4, t5, t6, t7) \
	ldd	[%src + (offset) + 0x00], %t0; \
	ldd	[%src + (offset) + 0x08], %t2; \
	ldd	[%src + (offset) + 0x10], %t4; \
	ldd	[%src + (offset) + 0x18], %t6; \
	std	%t0, [%dst + (offset) + 0x00]; \
	std	%t2, [%dst + (offset) + 0x08]; \
	std	%t4, [%dst + (offset) + 0x10]; \
	std	%t6, [%dst + (offset) + 0x18];

#define MOVE_LASTCHUNK(src, dst, offset, t0, t1, t2, t3) \
	ldd	[%src - (offset) - 0x10], %t0; \
	ldd	[%src - (offset) - 0x08], %t2; \
	st	%t0, [%dst - (offset) - 0x10]; \
	st	%t1, [%dst - (offset) - 0x0c]; \
	st	%t2, [%dst - (offset) - 0x08]; \
	st	%t3, [%dst - (offset) - 0x04];

#define MOVE_HALFCHUNK(src, dst, offset, t0, t1, t2, t3) \
	lduh	[%src + (offset) + 0x00], %t0; \
	lduh	[%src + (offset) + 0x02], %t1; \
	lduh	[%src + (offset) + 0x04], %t2; \
	lduh	[%src + (offset) + 0x06], %t3; \
	sth	%t0, [%dst + (offset) + 0x00]; \
	sth	%t1, [%dst + (offset) + 0x02]; \
	sth	%t2, [%dst + (offset) + 0x04]; \
	sth	%t3, [%dst + (offset) + 0x06];

#define MOVE_SHORTCHUNK(src, dst, offset, t0, t1) \
	ldub	[%src - (offset) - 0x02], %t0; \
	ldub	[%src - (offset) - 0x01], %t1; \
	stb	%t0, [%dst - (offset) - 0x02]; \
	stb	%t1, [%dst - (offset) - 0x01];
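
/* Expansion sizes the .fixup math at the end of this file relies on:
 *   MOVE_BIGCHUNK      - 12 insns, copies 32 bytes
 *   MOVE_BIGALIGNCHUNK -  8 insns, copies 32 bytes
 *   MOVE_LASTCHUNK     -  6 insns, copies 16 bytes
 *   MOVE_HALFCHUNK     -  8 insns, copies  8 bytes
 *   MOVE_SHORTCHUNK    -  4 insns, copies  2 bytes
 */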

	.text
	.align	4

	.globl	__copy_user_begin
__copy_user_begin:

	.globl	__copy_user
	EXPORT_SYMBOL(__copy_user)
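
/* Entered from __copy_user when the source pointer is not word aligned:
 * copy one to three leading bytes so that %o1 becomes 4-byte aligned,
 * then branch back into the word-aligned path at label 3.
 */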
dword_align:
	andcc	%o1, 1, %g0
	be	4f
	andcc	%o1, 2, %g0
	EXO2(ldub [%o1], %g2)
	add	%o1, 1, %o1
	EXO2(stb %g2, [%o0])
	sub	%o2, 1, %o2
	bne	3f
	add	%o0, 1, %o0
	EXO2(lduh [%o1], %g2)
	add	%o1, 2, %o1
	EXO2(sth %g2, [%o0])
	sub	%o2, 2, %o2
	b	3f
	add	%o0, 2, %o0
4:
	EXO2(lduh [%o1], %g2)
	add	%o1, 2, %o1
	EXO2(sth %g2, [%o0])
	sub	%o2, 2, %o2
	b	3f
	add	%o0, 2, %o0
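
/* Main entry point.  If dst and src do not share the same alignment
 * modulo 4 the unrolled word loops cannot be used and control goes to
 * cannot_optimize; copies of 15 bytes or less take short_aligned_end.
 */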
__copy_user:	/* %o0=dst %o1=src %o2=len */
	xor	%o0, %o1, %o4
1:
	andcc	%o4, 3, %o5
2:
	bne	cannot_optimize
	cmp	%o2, 15
	bleu	short_aligned_end
	andcc	%o1, 3, %g0
	bne	dword_align
3:
	andcc	%o1, 4, %g0
	be	2f
	mov	%o2, %g1
	EXO2(ld [%o1], %o4)
	sub	%g1, 4, %g1
	EXO2(st %o4, [%o0])
	add	%o1, 4, %o1
	add	%o0, 4, %o0
2:
	andcc	%g1, 0xffffff80, %g7
	be	3f
	andcc	%o0, 4, %g0
	be	ldd_std + 4
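
/* Unrolled loop for mutually word-aligned buffers: %g7 holds the length
 * rounded down to a multiple of 128 and every iteration copies 128
 * bytes.  The first ldd of the block sits in the delay slot of the
 * branch above, which is why the doubleword-aligned variant is entered
 * at ldd_std + 4.
 */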
5:
	MOVE_BIGCHUNK(o1, o0, 0x00, o2, o3, o4, o5, g2, g3, g4, g5)
	MOVE_BIGCHUNK(o1, o0, 0x20, o2, o3, o4, o5, g2, g3, g4, g5)
	MOVE_BIGCHUNK(o1, o0, 0x40, o2, o3, o4, o5, g2, g3, g4, g5)
	MOVE_BIGCHUNK(o1, o0, 0x60, o2, o3, o4, o5, g2, g3, g4, g5)
80:
	EXT(5b, 80b, 50f)
	subcc	%g7, 128, %g7
	add	%o1, 128, %o1
	bne	5b
	add	%o0, 128, %o0
3:
	andcc	%g1, 0x70, %g7
	be	copy_user_table_end
	andcc	%g1, 8, %g0
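
/* Each MOVE_LASTCHUNK in the table below expands to 6 insns (24 bytes)
 * and copies 16 bytes, so jump (%g7 / 16) * 24 = %g7 + %g7 / 2 bytes
 * back from copy_user_table_end to handle the remaining 16-byte blocks.
 */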
	sethi	%hi(copy_user_table_end), %o5
	srl	%g7, 1, %o4
	add	%g7, %o4, %o4
	add	%o1, %g7, %o1
	sub	%o5, %o4, %o5
	jmpl	%o5 + %lo(copy_user_table_end), %g0
	add	%o0, %g7, %o0
copy_user_table:
	MOVE_LASTCHUNK(o1, o0, 0x60, g2, g3, g4, g5)
	MOVE_LASTCHUNK(o1, o0, 0x50, g2, g3, g4, g5)
	MOVE_LASTCHUNK(o1, o0, 0x40, g2, g3, g4, g5)
	MOVE_LASTCHUNK(o1, o0, 0x30, g2, g3, g4, g5)
	MOVE_LASTCHUNK(o1, o0, 0x20, g2, g3, g4, g5)
	MOVE_LASTCHUNK(o1, o0, 0x10, g2, g3, g4, g5)
	MOVE_LASTCHUNK(o1, o0, 0x00, g2, g3, g4, g5)
copy_user_table_end:
	EXT(copy_user_table, copy_user_table_end, 51f)
	be	copy_user_last7
	andcc	%g1, 4, %g0
	EX(ldd [%o1], %g2, and %g1, 0xf)
	add	%o0, 8, %o0
	add	%o1, 8, %o1
	EX(st %g2, [%o0 - 0x08], and %g1, 0xf)
	EX2(st %g3, [%o0 - 0x04], and %g1, 0xf, %g1, sub %g1, 4)
copy_user_last7:
	be	1f
	andcc	%g1, 2, %g0
	EX(ld [%o1], %g2, and %g1, 7)
	add	%o1, 4, %o1
	EX(st %g2, [%o0], and %g1, 7)
	add	%o0, 4, %o0
1:
	be	1f
	andcc	%g1, 1, %g0
	EX(lduh [%o1], %g2, and %g1, 3)
	add	%o1, 2, %o1
	EX(sth %g2, [%o0], and %g1, 3)
	add	%o0, 2, %o0
1:
	be	1f
	nop
	EX(ldub [%o1], %g2, add %g0, 1)
	EX(stb %g2, [%o0], add %g0, 1)
1:
	retl
	clr	%o0
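
/* Both src and dst are doubleword aligned here.  The first ldd of each
 * 128-byte block has already been issued in the delay slot of the
 * "be ldd_std + 4" branch above, which is why this loop is entered
 * 4 bytes in and why MOVE_BIGCHUNK and MOVE_BIGALIGNCHUNK must start
 * with exactly the same instruction.
 */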
ldd_std:
	MOVE_BIGALIGNCHUNK(o1, o0, 0x00, o2, o3, o4, o5, g2, g3, g4, g5)
	MOVE_BIGALIGNCHUNK(o1, o0, 0x20, o2, o3, o4, o5, g2, g3, g4, g5)
	MOVE_BIGALIGNCHUNK(o1, o0, 0x40, o2, o3, o4, o5, g2, g3, g4, g5)
	MOVE_BIGALIGNCHUNK(o1, o0, 0x60, o2, o3, o4, o5, g2, g3, g4, g5)
81:
	EXT(ldd_std, 81b, 52f)
	subcc	%g7, 128, %g7
	add	%o1, 128, %o1
	bne	ldd_std
	add	%o0, 128, %o0
	andcc	%g1, 0x70, %g7
	be	copy_user_table_end
	andcc	%g1, 8, %g0
	sethi	%hi(copy_user_table_end), %o5
	srl	%g7, 1, %o4
	add	%g7, %o4, %o4
	add	%o1, %g7, %o1
	sub	%o5, %o4, %o5
	jmpl	%o5 + %lo(copy_user_table_end), %g0
	add	%o0, %g7, %o0
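
/* src and dst are not equally aligned modulo 4.  Lengths of 15 bytes or
 * less go straight to short_end; if the two pointers differ by exactly
 * 2 the copy can still be done a halfword at a time, otherwise
 * byte_chunk moves 16 bytes per iteration one byte at a time.
 */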
cannot_optimize:
	bleu	short_end
	cmp	%o5, 2
	bne	byte_chunk
	and	%o2, 0xfffffff0, %o3
	andcc	%o1, 1, %g0
	be	10f
	nop
	EXO2(ldub [%o1], %g2)
	add	%o1, 1, %o1
	EXO2(stb %g2, [%o0])
	sub	%o2, 1, %o2
	andcc	%o2, 0xfffffff0, %o3
	be	short_end
	add	%o0, 1, %o0
10:
	MOVE_HALFCHUNK(o1, o0, 0x00, g2, g3, g4, g5)
	MOVE_HALFCHUNK(o1, o0, 0x08, g2, g3, g4, g5)
82:
	EXT(10b, 82b, 53f)
	subcc	%o3, 0x10, %o3
	add	%o1, 0x10, %o1
	bne	10b
	add	%o0, 0x10, %o0
	b	2f
	and	%o2, 0xe, %o3
byte_chunk:
	MOVE_SHORTCHUNK(o1, o0, -0x02, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, -0x04, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, -0x06, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, -0x08, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, -0x0a, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, -0x0c, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, -0x0e, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, -0x10, g2, g3)
83:
	EXT(byte_chunk, 83b, 54f)
	subcc	%o3, 0x10, %o3
	add	%o1, 0x10, %o1
	bne	byte_chunk
	add	%o0, 0x10, %o0
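
/* At most 15 bytes remain.  Each MOVE_SHORTCHUNK entry in the table
 * below is 4 insns (16 bytes) and copies 2 bytes, so the computed jump
 * lands (%o3 / 2) * 16 = %o3 << 3 bytes back from short_table_end; a
 * trailing odd byte is copied after the table.
 */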
short_end:
	and	%o2, 0xe, %o3
2:
	sethi	%hi(short_table_end), %o5
	sll	%o3, 3, %o4
	add	%o0, %o3, %o0
	sub	%o5, %o4, %o5
	add	%o1, %o3, %o1
	jmpl	%o5 + %lo(short_table_end), %g0
	andcc	%o2, 1, %g0
84:
	MOVE_SHORTCHUNK(o1, o0, 0x0c, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, 0x0a, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, 0x08, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, 0x06, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, 0x04, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, 0x02, g2, g3)
	MOVE_SHORTCHUNK(o1, o0, 0x00, g2, g3)
short_table_end:
	EXT(84b, short_table_end, 55f)
	be	1f
	nop
	EX(ldub [%o1], %g2, add %g0, 1)
	EX(stb %g2, [%o0], add %g0, 1)
1:
	retl
	clr	%o0
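
/* At most 15 bytes, src and dst equally aligned.  If the source is word
 * aligned copy up to 8 bytes with word accesses and finish the last
 * 0-7 bytes in copy_user_last7, otherwise fall back to short_end.
 */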
short_aligned_end:
	bne	short_end
	andcc	%o2, 8, %g0
	be	1f
	andcc	%o2, 4, %g0
	EXO2(ld [%o1 + 0x00], %g2)
	EXO2(ld [%o1 + 0x04], %g3)
	add	%o1, 8, %o1
	EXO2(st %g2, [%o0 + 0x00])
	EX(st %g3, [%o0 + 0x04], sub %o2, 4)
	add	%o0, 8, %o0
1:
	b	copy_user_last7
	mov	%o2, %g1
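
/* Fault handlers.  The EX()/EX2() fixup stubs compute the number of
 * bytes not yet copied into %g3 themselves; EXO2() faults come in at
 * label 97 with the whole remaining length still in %o2.  For EXT()
 * ranges the trap code sets %g2 to the index of the faulting
 * instruction within the range and enters one of the numbered handlers
 * below, which reconstructs the outstanding byte count before branching
 * to fixupretl, where it is returned in %o0.
 */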
	.section .fixup,#alloc,#execinstr
	.align	4
97:
	mov	%o2, %g3
fixupretl:
	retl
	mov	%g3, %o0

/* exception routine sets %g2 to (broken_insn - first_insn)>>2 */
50:
/* This magic counts how many bytes are left when crash in MOVE_BIGCHUNK
 * happens. This is derived from the amount ldd reads, st stores, etc.
 * x = g2 % 12;
 * g3 = g1 + g7 - ((g2 / 12) * 32 + (x < 4) ? 0 : (x - 4) * 4);
 * o0 += (g2 / 12) * 32;
 */
	cmp	%g2, 12
	add	%o0, %g7, %o0
	bcs	1f
	cmp	%g2, 24
	bcs	2f
	cmp	%g2, 36
	bcs	3f
	nop
	sub	%g2, 12, %g2
	sub	%g7, 32, %g7
3:	sub	%g2, 12, %g2
	sub	%g7, 32, %g7
2:	sub	%g2, 12, %g2
	sub	%g7, 32, %g7
1:	cmp	%g2, 4
	bcs,a	60f
	clr	%g2
	sub	%g2, 4, %g2
	sll	%g2, 2, %g2
60:	and	%g1, 0x7f, %g3
	sub	%o0, %g7, %o0
	add	%g3, %g7, %g3
	ba	fixupretl
	sub	%g3, %g2, %g3
51:
/* i = 41 - g2; j = i % 6;
 * g3 = (g1 & 15) + (i / 6) * 16 + (j < 4) ? (j + 1) * 4 : 16;
 * o0 -= (i / 6) * 16 + 16;
 */
	neg	%g2
	and	%g1, 0xf, %g1
	add	%g2, 41, %g2
	add	%o0, %g1, %o0
1:	cmp	%g2, 6
	bcs,a	2f
	cmp	%g2, 4
	add	%g1, 16, %g1
	b	1b
	sub	%g2, 6, %g2
2:	bcc,a	2f
	mov	16, %g2
	inc	%g2
	sll	%g2, 2, %g2
2:	add	%g1, %g2, %g3
	ba	fixupretl
	sub	%o0, %g3, %o0
52:
/* g3 = g1 + g7 - (g2 / 8) * 32 + (g2 & 4) ? (g2 & 3) * 8 : 0;
 * o0 += (g2 / 8) * 32
 */
	andn	%g2, 7, %g4
	add	%o0, %g7, %o0
	andcc	%g2, 4, %g0
	and	%g2, 3, %g2
	sll	%g4, 2, %g4
	sll	%g2, 3, %g2
	bne	60b
	sub	%g7, %g4, %g7
	ba	60b
	clr	%g2
53:
/* g3 = o3 + (o2 & 15) - (g2 & 8) - (g2 & 4) ? (g2 & 3) * 2 : 0;
 * o0 += (g2 & 8)
 */
	and	%g2, 3, %g4
	andcc	%g2, 4, %g0
	and	%g2, 8, %g2
	sll	%g4, 1, %g4
	be	1f
	add	%o0, %g2, %o0
	add	%g2, %g4, %g2
1:	and	%o2, 0xf, %g3
	add	%g3, %o3, %g3
	ba	fixupretl
	sub	%g3, %g2, %g3
54:
/* g3 = o3 + (o2 & 15) - (g2 / 4) * 2 - (g2 & 2) ? (g2 & 1) : 0;
 * o0 += (g2 / 4) * 2
 */
	srl	%g2, 2, %o4
	and	%g2, 1, %o5
	srl	%g2, 1, %g2
	add	%o4, %o4, %o4
	and	%o5, %g2, %o5
	and	%o2, 0xf, %o2
	add	%o0, %o4, %o0
	sub	%o3, %o5, %o3
	sub	%o2, %o4, %o2
	ba	fixupretl
	add	%o2, %o3, %g3
55:
/* i = 27 - g2;
 * g3 = (o2 & 1) + i / 4 * 2 + !(i & 3);
 * o0 -= i / 4 * 2 + 1
 */
	neg	%g2
	and	%o2, 1, %o2
	add	%g2, 27, %g2
	srl	%g2, 2, %o5
	andcc	%g2, 3, %g0
	mov	1, %g2
	add	%o5, %o5, %o5
	be,a	1f
	clr	%g2
1:	add	%g2, %o5, %g3
	sub	%o0, %g3, %o0
	ba	fixupretl
	add	%g3, %o2, %g3

	.globl	__copy_user_end
__copy_user_end: