/* linux/arch/c6x/include/asm/uaccess.h */
   1/*
   2 *  Copyright (C) 2011 Texas Instruments Incorporated
   3 *  Author: Mark Salter <msalter@redhat.com>
   4 *
   5 *  This program is free software; you can redistribute it and/or modify
   6 *  it under the terms of the GNU General Public License version 2 as
   7 *  published by the Free Software Foundation.
   8 */
   9#ifndef _ASM_C6X_UACCESS_H
  10#define _ASM_C6X_UACCESS_H
  11
  12#include <linux/types.h>
  13#include <linux/compiler.h>
  14#include <linux/string.h>
  15
  16#ifdef CONFIG_ACCESS_CHECK
  17#define __access_ok _access_ok
  18#endif
  19
  20/*
  21 * __copy_from_user/copy_to_user are based on ones in asm-generic/uaccess.h
  22 *
  23 * C6X supports unaligned 32 and 64 bit loads and stores.
  24 */
/*
 * __copy_from_user - copy @n bytes from user space into a kernel buffer.
 * @to:   kernel destination buffer
 * @from: user-space source pointer
 * @n:    number of bytes to copy
 *
 * Always returns 0 (number of bytes NOT copied); there is no faulting
 * path here — presumably user pointers are directly dereferenceable on
 * this arch (noMMU) — TODO confirm. Access checking is expected to be
 * done by the callers (see the asm-generic/uaccess.h wrappers this is
 * based on, per the comment above).
 *
 * For compile-time-constant sizes of 1, 4 and 8 bytes an unaligned
 * load/store pair is emitted inline instead of calling memcpy(), since
 * C6X supports unaligned 32- and 64-bit accesses (see header comment).
 */
static inline __must_check long __copy_from_user(void *to,
		const void __user *from, unsigned long n)
{
	u32 tmp32;
	u64 tmp64;

	if (__builtin_constant_p(n)) {
		switch (n) {
		case 1:
			/* Single byte: plain dereference, no alignment concern. */
			*(u8 *)to = *(u8 __force *)from;
			return 0;
		case 4:
			/*
			 * LDNW/STNW are the C6X non-aligned word load/store.
			 * The "nop 4" fills the load's delay slots so %0 is
			 * valid before the store — TODO confirm delay-slot
			 * count against the C6X ISA manual.
			 *
			 * NOTE(review): the upper-case "A" constraint on
			 * 'to' differs from the lower-case "a" used in every
			 * other asm in this file — confirm it is intentional.
			 */
			asm volatile ("ldnw .d1t1 *%2,%0\n"
				      "nop  4\n"
				      "stnw .d1t1 %0,*%1\n"
				      : "=&a"(tmp32)
				      : "A"(to), "a"(from)
				      : "memory");
			return 0;
		case 8:
			/* Non-aligned doubleword (64-bit) load/store variant. */
			asm volatile ("ldndw .d1t1 *%2,%0\n"
				      "nop   4\n"
				      "stndw .d1t1 %0,*%1\n"
				      : "=&a"(tmp64)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		default:
			break;
		}
	}

	/* Variable or other constant sizes: fall back to memcpy(). */
	memcpy(to, (const void __force *)from, n);
	return 0;
}
  60
/*
 * __copy_to_user - copy @n bytes from a kernel buffer into user space.
 * @to:   user-space destination pointer
 * @from: kernel source buffer
 * @n:    number of bytes to copy
 *
 * Always returns 0 (number of bytes NOT copied); mirror image of
 * __copy_from_user() above — no faulting path, access checking is the
 * callers' responsibility (asm-generic/uaccess.h wrappers).
 *
 * For compile-time-constant sizes of 1, 4 and 8 bytes an unaligned
 * load/store pair is emitted inline instead of calling memcpy(), since
 * C6X supports unaligned 32- and 64-bit accesses (see header comment).
 */
static inline __must_check long __copy_to_user(void __user *to,
		const void *from, unsigned long n)
{
	u32 tmp32;
	u64 tmp64;

	if (__builtin_constant_p(n)) {
		switch (n) {
		case 1:
			/* Single byte: plain dereference, no alignment concern. */
			*(u8 __force *)to = *(u8 *)from;
			return 0;
		case 4:
			/*
			 * LDNW/STNW are the C6X non-aligned word load/store.
			 * The "nop 4" fills the load's delay slots so %0 is
			 * valid before the store — TODO confirm delay-slot
			 * count against the C6X ISA manual.
			 */
			asm volatile ("ldnw .d1t1 *%2,%0\n"
				      "nop  4\n"
				      "stnw .d1t1 %0,*%1\n"
				      : "=&a"(tmp32)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		case 8:
			/* Non-aligned doubleword (64-bit) load/store variant. */
			asm volatile ("ldndw .d1t1 *%2,%0\n"
				      "nop   4\n"
				      "stndw .d1t1 %0,*%1\n"
				      : "=&a"(tmp64)
				      : "a"(to), "a"(from)
				      : "memory");
			return 0;
		default:
			break;
		}
	}

	/* Variable or other constant sizes: fall back to memcpy(). */
	memcpy((void __force *)to, from, n);
	return 0;
}
  96
  97#define __copy_to_user   __copy_to_user
  98#define __copy_from_user __copy_from_user
  99
 100extern int _access_ok(unsigned long addr, unsigned long size);
 101#ifdef CONFIG_ACCESS_CHECK
 102#define __access_ok _access_ok
 103#endif
 104
 105#include <asm-generic/uaccess.h>
 106
 107#endif /* _ASM_C6X_UACCESS_H */
 108