uaccess.h

/*
 * Copyright (C) 2011 Texas Instruments Incorporated
 * Author: Mark Salter <msalter@redhat.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */
#ifndef _ASM_C6X_UACCESS_H
#define _ASM_C6X_UACCESS_H

#include <linux/types.h>
#include <linux/compiler.h>
#include <linux/string.h>

/*
 * C6X supports unaligned 32 and 64 bit loads and stores.
 */
static inline __must_check unsigned long
raw_copy_from_user(void *to, const void __user *from, unsigned long n)
{
	u32 tmp32;
	u64 tmp64;

	if (__builtin_constant_p(n)) {
		switch (n) {
		case 1:
			*(u8 *)to = *(u8 __force *)from;
			return 0;
		case 4:
			/* non-aligned word load/store; nop 4 fills the load delay slots */
			asm volatile ("ldnw .d1t1 *%2,%0\n"
				      "nop 4\n"
				      "stnw .d1t1 %0,*%1\n"
				      : "=&a"(tmp32)
				      : "A"(to), "a"(from)
				      : "memory");
			return 0;
		case 8:
			/* non-aligned doubleword load/store */
			asm volatile ("ldndw .d1t1 *%2,%0\n"
				      "nop 4\n"
				      "stndw .d1t1 %0,*%1\n"
				      : "=&a"(tmp64)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		default:
			break;
		}
	}

	memcpy(to, (const void __force *)from, n);
	return 0;
}
static inline __must_check unsigned long
raw_copy_to_user(void __user *to, const void *from, unsigned long n)
{
	u32 tmp32;
	u64 tmp64;

	if (__builtin_constant_p(n)) {
		switch (n) {
		case 1:
			*(u8 __force *)to = *(u8 *)from;
			return 0;
		case 4:
			asm volatile ("ldnw .d1t1 *%2,%0\n"
				      "nop 4\n"
				      "stnw .d1t1 %0,*%1\n"
				      : "=&a"(tmp32)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		case 8:
			asm volatile ("ldndw .d1t1 *%2,%0\n"
				      "nop 4\n"
				      "stndw .d1t1 %0,*%1\n"
				      : "=&a"(tmp64)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		default:
			break;
		}
	}

	memcpy((void __force *)to, from, n);
	return 0;
}
#define INLINE_COPY_FROM_USER
#define INLINE_COPY_TO_USER

extern int _access_ok(unsigned long addr, unsigned long size);
#ifdef CONFIG_ACCESS_CHECK
#define __access_ok _access_ok
#endif

#include <asm-generic/uaccess.h>

#endif /* _ASM_C6X_UACCESS_H */
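
To illustrate the constant-size fast-path pattern the header relies on, here is a minimal, hypothetical user-space sketch: __builtin_constant_p() lets the compiler pick a fixed-size path when the length is a compile-time constant, with memcpy() as the general fallback. demo_copy() and the memcpy()-based stand-ins for the ldnw/stnw and ldndw/stndw asm sequences are illustrations only, not part of the header above.

#include <stdint.h>
#include <string.h>
#include <stdio.h>

/*
 * Hypothetical sketch of the same dispatch pattern used by
 * raw_copy_from_user()/raw_copy_to_user() above: a constant,
 * small size takes a fixed-size path, anything else falls
 * back to memcpy().
 */
static inline unsigned long demo_copy(void *to, const void *from, unsigned long n)
{
	if (__builtin_constant_p(n)) {
		switch (n) {
		case 1:
			*(uint8_t *)to = *(const uint8_t *)from;
			return 0;
		case 4: {
			uint32_t tmp;	/* plain C stand-in for the ldnw/stnw sequence */
			memcpy(&tmp, from, 4);
			memcpy(to, &tmp, 4);
			return 0;
		}
		case 8: {
			uint64_t tmp;	/* plain C stand-in for the ldndw/stndw sequence */
			memcpy(&tmp, from, 8);
			memcpy(to, &tmp, 8);
			return 0;
		}
		default:
			break;
		}
	}
	memcpy(to, from, n);
	return 0;
}

int main(void)
{
	char src[16] = "unaligned data!";
	char dst[16] = { 0 };

	demo_copy(dst, src, 8);		/* constant size: compiler selects the fixed-size path */
	printf("%.8s\n", dst);
	return 0;
}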