/*
 * Copyright (C) 2004, 2007-2010, 2011-2012 Synopsys, Inc. (www.synopsys.com)
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * vineetg: June 2010
 *  -__clear_user(), called multiple times during elf load, was a byte loop;
 *   converted to do as much word clearing as possible.
 *
 * vineetg: Dec 2009
 *  -Hand-crafted constant propagation for "constant" copy sizes
 *  -stock kernel shrunk by 33K at -O3
 *
 * vineetg: Sept 2009
 *  -Added option to (UN)inline copy_(to|from)_user to reduce code size
 *  -kernel shrunk by 200K even at -O3 (gcc 4.2.1)
 *  -Enabled when building with -Os
 *
 * Amit Bhor, Sameer Dhavale: Codito Technologies 2004
 */
#ifndef _ASM_ARC_UACCESS_H
#define _ASM_ARC_UACCESS_H

#include <linux/sched.h>
#include <asm/errno.h>
#include <linux/string.h>	/* for generic string functions */

#define __kernel_ok		(segment_eq(get_fs(), KERNEL_DS))

/*
 * Algorithmically, for __user_ok() we want to do:
 *	(start < TASK_SIZE) && (start+len < TASK_SIZE)
 * where TASK_SIZE could either be retrieved from thread_info->addr_limit or
 * emitted directly in code.
 *
 * This can however be rewritten as follows:
 *	(len <= TASK_SIZE) && (start+len < TASK_SIZE)
 *
 * because it essentially checks that the buffer end is within the limit and
 * @len is non-negative, which implies that the buffer start will be within
 * the limit too.
 *
 * The reason for rewriting is that, for the majority of cases, @len is a
 * compile-time constant, so the first sub-expression is subsumed at compile
 * time.
 *
 * The second part would generate weird large LIMMs e.g. (0x6000_0000 - 0x10),
 * so we check against TASK_SIZE using get_fs(), since the addr_limit load
 * from memory has already been done at this call site for __kernel_ok().
 */
#define __user_ok(addr, sz)	(((sz) <= TASK_SIZE) && \
				 ((addr) <= (get_fs() - (sz))))
#define __access_ok(addr, sz)	(unlikely(__kernel_ok) || \
				 likely(__user_ok((addr), (sz))))
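
/*
 * Illustrative sketch, assuming get_fs() == TASK_SIZE == 0x6000_0000 as in
 * the LIMM example above: a 0x10 byte window starting at 0x5FFF_FFF8 is
 * rejected because 0x5FFF_FFF8 > (0x6000_0000 - 0x10), even though the start
 * address alone is below TASK_SIZE.  A hypothetical caller would normally go
 * through the generic access_ok() wrapper rather than these helpers directly:
 *
 *	if (!__access_ok((unsigned long)ubuf, len))
 *		return -EFAULT;
 */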

/*********** Single byte/hword/word copies ******************/

#define __get_user_fn(sz, u, k)						\
({									\
	long __ret = 0;	/* success by default */			\
	switch (sz) {							\
	case 1: __arc_get_user_one(*(k), u, "ldb", __ret); break;	\
	case 2: __arc_get_user_one(*(k), u, "ldw", __ret); break;	\
	case 4: __arc_get_user_one(*(k), u, "ld", __ret); break;	\
	case 8: __arc_get_user_one_64(*(k), u, __ret); break;		\
	}								\
	__ret;								\
})

/*
 * Returns 0 on success, -EFAULT if not.
 * @ret already contains 0 - given that errors will be less likely
 * (hence the "+r" asm constraint below).
 * In case of a fault, the fixup code sets it to -EFAULT.
 */
#define __arc_get_user_one(dst, src, op, ret)	\
	__asm__ __volatile__(			\
	"1:	"op" %1,[%2]\n"			\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1, 0\n"			\
	"	j 2b\n"				\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))

#define __arc_get_user_one_64(dst, src, ret)	\
	__asm__ __volatile__(			\
	"1:	ld %1,[%2]\n"			\
	"4:	ld %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	# return -EFAULT\n"		\
	"	mov %0, %3\n"			\
	"	# zero out dst ptr\n"		\
	"	mov %1, 0\n"			\
	"	mov %R1, 0\n"			\
	"	j 2b\n"				\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret), "=r" (dst)		\
	: "r" (src), "ir" (-EFAULT))

#define __put_user_fn(sz, u, k)						\
({									\
	long __ret = 0;	/* success by default */			\
	switch (sz) {							\
	case 1: __arc_put_user_one(*(k), u, "stb", __ret); break;	\
	case 2: __arc_put_user_one(*(k), u, "stw", __ret); break;	\
	case 4: __arc_put_user_one(*(k), u, "st", __ret); break;	\
	case 8: __arc_put_user_one_64(*(k), u, __ret); break;		\
	}								\
	__ret;								\
})

#define __arc_put_user_one(src, dst, op, ret)	\
	__asm__ __volatile__(			\
	"1:	"op" %1,[%2]\n"			\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j 2b\n"				\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))

#define __arc_put_user_one_64(src, dst, ret)	\
	__asm__ __volatile__(			\
	"1:	st %1,[%2]\n"			\
	"4:	st %R1,[%2, 4]\n"		\
	"2:	;nop\n"				\
	"	.section .fixup, \"ax\"\n"	\
	"	.align 4\n"			\
	"3:	mov %0, %3\n"			\
	"	j 2b\n"				\
	"	.previous\n"			\
	"	.section __ex_table, \"a\"\n"	\
	"	.align 4\n"			\
	"	.word 1b,3b\n"			\
	"	.word 4b,3b\n"			\
	"	.previous\n"			\
						\
	: "+r" (ret)				\
	: "r" (src), "r" (dst), "ir" (-EFAULT))

static inline unsigned long
__arc_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__ (
		"	mov.f lp_count, %0	\n"
		"	lpnz 2f			\n"
		"1:	ldb.ab %1, [%3, 1]	\n"
		"	stb.ab %1, [%2, 1]	\n"
		"	sub %0,%0,1		\n"
		"2:	;nop			\n"
		"	.section .fixup, \"ax\"	\n"
		"	.align 4		\n"
		"3:	j 2b			\n"
		"	.previous		\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4		\n"
		"	.word 1b, 3b		\n"
		"	.previous		\n"
		: "+r" (n),
		/*
		 * Note: the '&' earlyclobber on this output operand makes
		 * sure the temporary register used inside the loop is not
		 * the same as FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "memory");

		return n;
	}

	/*
	 * Hand-crafted constant propagation to reduce code size of the
	 * laddered copy: 16x, 8, 4, 2, 1
	 */
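	/*
	 * Worked example (a sketch, not from the original comment): for a
	 * compile-time constant n == 22, only the 16-byte, 4-byte and 2-byte
	 * branches below survive (22 = 16 + 4 + 2); the compiler discards
	 * the 8-byte and 1-byte cases since their conditions fold to false.
	 */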
	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr lp_count, %7,4	\n"
			"	lp 3f			\n"
			"1:	ld.ab %3, [%2, 4]	\n"
			"11:	ld.ab %4, [%2, 4]	\n"
			"12:	ld.ab %5, [%2, 4]	\n"
			"13:	ld.ab %6, [%2, 4]	\n"
			"	st.ab %3, [%1, 4]	\n"
			"	st.ab %4, [%1, 4]	\n"
			"	st.ab %5, [%1, 4]	\n"
			"	st.ab %6, [%1, 4]	\n"
			"	sub %0,%0,16		\n"
			"3:	;nop			\n"
			"	.section .fixup, \"ax\"	\n"
			"	.align 4		\n"
			"4:	j 3b			\n"
			"	.previous		\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4		\n"
			"	.word 1b, 4b		\n"
			"	.word 11b,4b		\n"
			"	.word 12b,4b		\n"
			"	.word 13b,4b		\n"
			"	.previous		\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"14:	ld.ab %3, [%2,4]	\n"
			"15:	ld.ab %4, [%2,4]	\n"
			"	st.ab %3, [%1,4]	\n"
			"	st.ab %4, [%1,4]	\n"
			"	sub %0,%0,8		\n"
			"31:	;nop			\n"
			"	.section .fixup, \"ax\"	\n"
			"	.align 4		\n"
			"4:	j 31b			\n"
			"	.previous		\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4		\n"
			"	.word 14b,4b		\n"
			"	.word 15b,4b		\n"
			"	.previous		\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"16:	ld.ab %3, [%2,4]	\n"
			"	st.ab %3, [%1,4]	\n"
			"	sub %0,%0,4		\n"
			"32:	;nop			\n"
			"	.section .fixup, \"ax\"	\n"
			"	.align 4		\n"
			"4:	j 32b			\n"
			"	.previous		\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4		\n"
			"	.word 16b,4b		\n"
			"	.previous		\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"17:	ldw.ab %3, [%2,2]	\n"
			"	stw.ab %3, [%1,2]	\n"
			"	sub %0,%0,2		\n"
			"33:	;nop			\n"
			"	.section .fixup, \"ax\"	\n"
			"	.align 4		\n"
			"4:	j 33b			\n"
			"	.previous		\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4		\n"
			"	.word 17b,4b		\n"
			"	.previous		\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"18:	ldb.ab %3, [%2,2]	\n"
			"	stb.ab %3, [%1,2]	\n"
			"	sub %0,%0,1		\n"
			"34:	;nop			\n"
			"	.section .fixup, \"ax\"	\n"
			"	.align 4		\n"
			"4:	j 34b			\n"
			"	.previous		\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4		\n"
			"	.word 18b,4b		\n"
			"	.previous		\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */

		__asm__ __volatile__(
		"	mov %0,%3		\n"
		"	lsr.f lp_count, %3,4	\n"  /* 16x bytes */
		"	lpnz 3f			\n"
		"1:	ld.ab %5, [%2, 4]	\n"
		"11:	ld.ab %6, [%2, 4]	\n"
		"12:	ld.ab %7, [%2, 4]	\n"
		"13:	ld.ab %8, [%2, 4]	\n"
		"	st.ab %5, [%1, 4]	\n"
		"	st.ab %6, [%1, 4]	\n"
		"	st.ab %7, [%1, 4]	\n"
		"	st.ab %8, [%1, 4]	\n"
		"	sub %0,%0,16		\n"
		"3:	and.f %3,%3,0xf		\n"  /* stragglers */
		"	bz 34f			\n"
		"	bbit0 %3,3,31f		\n"  /* 8 bytes left */
		"14:	ld.ab %5, [%2,4]	\n"
		"15:	ld.ab %6, [%2,4]	\n"
		"	st.ab %5, [%1,4]	\n"
		"	st.ab %6, [%1,4]	\n"
		"	sub.f %0,%0,8		\n"
		"31:	bbit0 %3,2,32f		\n"  /* 4 bytes left */
		"16:	ld.ab %5, [%2,4]	\n"
		"	st.ab %5, [%1,4]	\n"
		"	sub.f %0,%0,4		\n"
		"32:	bbit0 %3,1,33f		\n"  /* 2 bytes left */
		"17:	ldw.ab %5, [%2,2]	\n"
		"	stw.ab %5, [%1,2]	\n"
		"	sub.f %0,%0,2		\n"
		"33:	bbit0 %3,0,34f		\n"
		"18:	ldb.ab %5, [%2,1]	\n"  /* 1 byte left */
		"	stb.ab %5, [%1,1]	\n"
		"	sub.f %0,%0,1		\n"
		"34:	;nop			\n"
		"	.section .fixup, \"ax\"	\n"
		"	.align 4		\n"
		"4:	j 34b			\n"
		"	.previous		\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4		\n"
		"	.word 1b, 4b		\n"
		"	.word 11b,4b		\n"
		"	.word 12b,4b		\n"
		"	.word 13b,4b		\n"
		"	.word 14b,4b		\n"
		"	.word 15b,4b		\n"
		"	.word 16b,4b		\n"
		"	.word 17b,4b		\n"
		"	.word 18b,4b		\n"
		"	.previous		\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}
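
/*
 * Hedged usage note (standard copy_{to,from}_user semantics rather than
 * anything ARC-specific): the value returned above is the number of bytes
 * that could NOT be copied, so a typical caller looks like:
 *
 *	if (copy_from_user(kbuf, ubuf, len))
 *		return -EFAULT;
 */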

extern unsigned long slowpath_copy_to_user(void __user *to, const void *from,
					   unsigned long n);

static inline unsigned long
__arc_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	long res = 0;
	char val;
	unsigned long tmp1, tmp2, tmp3, tmp4;
	unsigned long orig_n = n;

	if (n == 0)
		return 0;

	/* unaligned */
	if (((unsigned long)to & 0x3) || ((unsigned long)from & 0x3)) {

		unsigned char tmp;

		__asm__ __volatile__(
		"	mov.f lp_count, %0	\n"
		"	lpnz 3f			\n"
		"	ldb.ab %1, [%3, 1]	\n"
		"1:	stb.ab %1, [%2, 1]	\n"
		"	sub %0, %0, 1		\n"
		"3:	;nop			\n"
		"	.section .fixup, \"ax\"	\n"
		"	.align 4		\n"
		"4:	j 3b			\n"
		"	.previous		\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4		\n"
		"	.word 1b, 4b		\n"
		"	.previous		\n"
		: "+r" (n),
		/*
		 * Note: the '&' earlyclobber on this output operand makes
		 * sure the temporary register used inside the loop is not
		 * the same as FROM or TO.
		 */
		  "=&r" (tmp), "+r" (to), "+r" (from)
		:
		: "lp_count", "memory");

		return n;
	}

	if (__builtin_constant_p(orig_n)) {
		res = orig_n;

		if (orig_n / 16) {
			orig_n = orig_n % 16;

			__asm__ __volatile__(
			"	lsr lp_count, %7,4	\n"
			"	lp 3f			\n"
			"	ld.ab %3, [%2, 4]	\n"
			"	ld.ab %4, [%2, 4]	\n"
			"	ld.ab %5, [%2, 4]	\n"
			"	ld.ab %6, [%2, 4]	\n"
			"1:	st.ab %3, [%1, 4]	\n"
			"11:	st.ab %4, [%1, 4]	\n"
			"12:	st.ab %5, [%1, 4]	\n"
			"13:	st.ab %6, [%1, 4]	\n"
			"	sub %0, %0, 16		\n"
			"3:	;nop			\n"
			"	.section .fixup, \"ax\"	\n"
			"	.align 4		\n"
			"4:	j 3b			\n"
			"	.previous		\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4		\n"
			"	.word 1b, 4b		\n"
			"	.word 11b,4b		\n"
			"	.word 12b,4b		\n"
			"	.word 13b,4b		\n"
			"	.previous		\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
			: "ir"(n)
			: "lp_count", "memory");
		}
		if (orig_n / 8) {
			orig_n = orig_n % 8;

			__asm__ __volatile__(
			"	ld.ab %3, [%2,4]	\n"
			"	ld.ab %4, [%2,4]	\n"
			"14:	st.ab %3, [%1,4]	\n"
			"15:	st.ab %4, [%1,4]	\n"
			"	sub %0, %0, 8		\n"
			"31:	;nop			\n"
			"	.section .fixup, \"ax\"	\n"
			"	.align 4		\n"
			"4:	j 31b			\n"
			"	.previous		\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4		\n"
			"	.word 14b,4b		\n"
			"	.word 15b,4b		\n"
			"	.previous		\n"
			: "+r" (res), "+r"(to), "+r"(from),
			  "=r"(tmp1), "=r"(tmp2)
			:
			: "memory");
		}
		if (orig_n / 4) {
			orig_n = orig_n % 4;

			__asm__ __volatile__(
			"	ld.ab %3, [%2,4]	\n"
			"16:	st.ab %3, [%1,4]	\n"
			"	sub %0, %0, 4		\n"
			"32:	;nop			\n"
			"	.section .fixup, \"ax\"	\n"
			"	.align 4		\n"
			"4:	j 32b			\n"
			"	.previous		\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4		\n"
			"	.word 16b,4b		\n"
			"	.previous		\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n / 2) {
			orig_n = orig_n % 2;

			__asm__ __volatile__(
			"	ldw.ab %3, [%2,2]	\n"
			"17:	stw.ab %3, [%1,2]	\n"
			"	sub %0, %0, 2		\n"
			"33:	;nop			\n"
			"	.section .fixup, \"ax\"	\n"
			"	.align 4		\n"
			"4:	j 33b			\n"
			"	.previous		\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4		\n"
			"	.word 17b,4b		\n"
			"	.previous		\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
		if (orig_n & 1) {
			__asm__ __volatile__(
			"	ldb.ab %3, [%2,1]	\n"
			"18:	stb.ab %3, [%1,1]	\n"
			"	sub %0, %0, 1		\n"
			"34:	;nop			\n"
			"	.section .fixup, \"ax\"	\n"
			"	.align 4		\n"
			"4:	j 34b			\n"
			"	.previous		\n"
			"	.section __ex_table, \"a\"	\n"
			"	.align 4		\n"
			"	.word 18b,4b		\n"
			"	.previous		\n"
			: "+r" (res), "+r"(to), "+r"(from), "=r"(tmp1)
			:
			: "memory");
		}
	} else {  /* n is NOT constant, so laddered copy of 16x,8,4,2,1  */

		__asm__ __volatile__(
		"	mov %0,%3		\n"
		"	lsr.f lp_count, %3,4	\n"  /* 16x bytes */
		"	lpnz 3f			\n"
		"	ld.ab %5, [%2, 4]	\n"
		"	ld.ab %6, [%2, 4]	\n"
		"	ld.ab %7, [%2, 4]	\n"
		"	ld.ab %8, [%2, 4]	\n"
		"1:	st.ab %5, [%1, 4]	\n"
		"11:	st.ab %6, [%1, 4]	\n"
		"12:	st.ab %7, [%1, 4]	\n"
		"13:	st.ab %8, [%1, 4]	\n"
		"	sub %0, %0, 16		\n"
		"3:	and.f %3,%3,0xf		\n"  /* stragglers */
		"	bz 34f			\n"
		"	bbit0 %3,3,31f		\n"  /* 8 bytes left */
		"	ld.ab %5, [%2,4]	\n"
		"	ld.ab %6, [%2,4]	\n"
		"14:	st.ab %5, [%1,4]	\n"
		"15:	st.ab %6, [%1,4]	\n"
		"	sub.f %0, %0, 8		\n"
		"31:	bbit0 %3,2,32f		\n"  /* 4 bytes left */
		"	ld.ab %5, [%2,4]	\n"
		"16:	st.ab %5, [%1,4]	\n"
		"	sub.f %0, %0, 4		\n"
		"32:	bbit0 %3,1,33f		\n"  /* 2 bytes left */
		"	ldw.ab %5, [%2,2]	\n"
		"17:	stw.ab %5, [%1,2]	\n"
		"	sub.f %0, %0, 2		\n"
		"33:	bbit0 %3,0,34f		\n"
		"	ldb.ab %5, [%2,1]	\n"  /* 1 byte left */
		"18:	stb.ab %5, [%1,1]	\n"
		"	sub.f %0, %0, 1		\n"
		"34:	;nop			\n"
		"	.section .fixup, \"ax\"	\n"
		"	.align 4		\n"
		"4:	j 34b			\n"
		"	.previous		\n"
		"	.section __ex_table, \"a\"	\n"
		"	.align 4		\n"
		"	.word 1b, 4b		\n"
		"	.word 11b,4b		\n"
		"	.word 12b,4b		\n"
		"	.word 13b,4b		\n"
		"	.word 14b,4b		\n"
		"	.word 15b,4b		\n"
		"	.word 16b,4b		\n"
		"	.word 17b,4b		\n"
		"	.word 18b,4b		\n"
		"	.previous		\n"
		: "=r" (res), "+r"(to), "+r"(from), "+r"(n), "=r"(val),
		  "=r"(tmp1), "=r"(tmp2), "=r"(tmp3), "=r"(tmp4)
		:
		: "lp_count", "memory");
	}

	return res;
}

static inline unsigned long __arc_clear_user(void __user *to, unsigned long n)
{
	long res = n;
	unsigned char *d_char = to;

	__asm__ __volatile__(
	"	bbit0 %0, 0, 1f		\n"
	"75:	stb.ab %2, [%0,1]	\n"
	"	sub %1, %1, 1		\n"
	"1:	bbit0 %0, 1, 2f		\n"
	"76:	stw.ab %2, [%0,2]	\n"
	"	sub %1, %1, 2		\n"
	"2:	asr.f lp_count, %1, 2	\n"
	"	lpnz 3f			\n"
	"77:	st.ab %2, [%0,4]	\n"
	"	sub %1, %1, 4		\n"
	"3:	bbit0 %1, 1, 4f		\n"
	"78:	stw.ab %2, [%0,2]	\n"
	"	sub %1, %1, 2		\n"
	"4:	bbit0 %1, 0, 5f		\n"
	"79:	stb.ab %2, [%0,1]	\n"
	"	sub %1, %1, 1		\n"
	"5:				\n"
	"	.section .fixup, \"ax\"	\n"
	"	.align 4		\n"
	"3:	j 5b			\n"
	"	.previous		\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4		\n"
	"	.word 75b, 3b		\n"
	"	.word 76b, 3b		\n"
	"	.word 77b, 3b		\n"
	"	.word 78b, 3b		\n"
	"	.word 79b, 3b		\n"
	"	.previous		\n"
	: "+r"(d_char), "+r"(res)
	: "i"(0)
	: "lp_count", "memory");

	return res;
}
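
/*
 * Illustrative walk-through of the ladder above (a sketch, not part of the
 * original header): clearing n = 11 bytes starting at an odd address issues
 * one stb (head byte), one stw (head halfword), then two word stores via the
 * zero-overhead loop, leaving no tail: 1 + 2 + 8 = 11 and @res reaches 0.
 */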

static inline long
__arc_strncpy_from_user(char *dst, const char __user *src, long count)
{
	long res = 0;
	char val;

	if (count == 0)
		return 0;

	__asm__ __volatile__(
	"	mov lp_count, %5		\n"
	"	lp 3f				\n"
	"1:	ldb.ab %3, [%2, 1]		\n"
	"	breq.d %3, 0, 3f		\n"
	"	stb.ab %3, [%1, 1]		\n"
	"	add %0, %0, 1	# Num of NON NULL bytes copied	\n"
	"3:					\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, %4	# sets @res as -EFAULT	\n"
	"	j 3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word 1b, 4b			\n"
	"	.previous			\n"
	: "+r"(res), "+r"(dst), "+r"(src), "=r"(val)
	: "g"(-EFAULT), "r"(count)
	: "lp_count", "memory");

	return res;
}
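
/*
 * Hedged usage sketch, read off the asm comments above: on success @res is
 * the number of non-NUL bytes copied into @dst (the copy stops at the NUL or
 * after @count bytes); a fault on the user-space load returns -EFAULT. A
 * hypothetical caller:
 *
 *	long copied = __strncpy_from_user(kname, uname, sizeof(kname));
 *	if (copied < 0)
 *		return copied;	// -EFAULT
 */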

static inline long __arc_strnlen_user(const char __user *s, long n)
{
	long res, tmp1, cnt;
	char val;

	__asm__ __volatile__(
	"	mov %2, %1			\n"
	"1:	ldb.ab %3, [%0, 1]		\n"
	"	breq.d %3, 0, 2f		\n"
	"	sub.f %2, %2, 1			\n"
	"	bnz 1b				\n"
	"	sub %2, %2, 1			\n"
	"2:	sub %0, %1, %2			\n"
	"3:	;nop				\n"
	"	.section .fixup, \"ax\"		\n"
	"	.align 4			\n"
	"4:	mov %0, 0			\n"
	"	j 3b				\n"
	"	.previous			\n"
	"	.section __ex_table, \"a\"	\n"
	"	.align 4			\n"
	"	.word 1b, 4b			\n"
	"	.previous			\n"
	: "=r"(res), "=r"(tmp1), "=r"(cnt), "=r"(val)
	: "0"(s), "1"(n)
	: "memory");

	return res;
}

#ifndef CONFIG_CC_OPTIMIZE_FOR_SIZE

#define __copy_from_user(t, f, n)	__arc_copy_from_user(t, f, n)
#define __copy_to_user(t, f, n)	__arc_copy_to_user(t, f, n)
#define __clear_user(d, n)		__arc_clear_user(d, n)
#define __strncpy_from_user(d, s, n)	__arc_strncpy_from_user(d, s, n)
#define __strnlen_user(s, n)		__arc_strnlen_user(s, n)

#else

extern long arc_copy_from_user_noinline(void *to, const void __user *from,
					unsigned long n);
extern long arc_copy_to_user_noinline(void __user *to, const void *from,
				      unsigned long n);
extern unsigned long arc_clear_user_noinline(void __user *to,
					     unsigned long n);
extern long arc_strncpy_from_user_noinline(char *dst, const char __user *src,
					   long count);
extern long arc_strnlen_user_noinline(const char __user *src, long n);

#define __copy_from_user(t, f, n)	arc_copy_from_user_noinline(t, f, n)
#define __copy_to_user(t, f, n)		arc_copy_to_user_noinline(t, f, n)
#define __clear_user(d, n)		arc_clear_user_noinline(d, n)
#define __strncpy_from_user(d, s, n)	arc_strncpy_from_user_noinline(d, s, n)
#define __strnlen_user(s, n)		arc_strnlen_user_noinline(s, n)

#endif

#include <asm-generic/uaccess.h>

extern int fixup_exception(struct pt_regs *regs);

#endif