/* linux/arch/c6x/include/asm/unaligned.h */
/* SPDX-License-Identifier: GPL-2.0-only */
/*
 *  Port on Texas Instruments TMS320C6x architecture
 *
 *  Copyright (C) 2004, 2009, 2010 Texas Instruments Incorporated
 *  Author: Aurelien Jacquiot (aurelien.jacquiot@jaluna.com)
 *  Rewritten for 2.6.3x: Mark Salter <msalter@redhat.com>
 */
#ifndef _ASM_C6X_UNALIGNED_H
#define _ASM_C6X_UNALIGNED_H

#include <linux/swab.h>

/*
 * The C64x+ can do unaligned word and dword accesses in hardware
 * using special load/store instructions.
 */

  19static inline u16 get_unaligned_le16(const void *p)
  20{
  21        const u8 *_p = p;
  22        return _p[0] | _p[1] << 8;
  23}
  24
  25static inline u16 get_unaligned_be16(const void *p)
  26{
  27        const u8 *_p = p;
  28        return _p[0] << 8 | _p[1];
  29}
  30
  31static inline void put_unaligned_le16(u16 val, void *p)
  32{
  33        u8 *_p = p;
  34        _p[0] = val;
  35        _p[1] = val >> 8;
  36}
  37
  38static inline void put_unaligned_be16(u16 val, void *p)
  39{
  40        u8 *_p = p;
  41        _p[0] = val >> 8;
  42        _p[1] = val;
  43}
  44
/*
 * Load a 32-bit word in native endianness from a possibly unaligned
 * address using the C64x+ non-aligned load instruction (LDNW).
 *
 * The pointer value is placed in an A-side register ("a" constraint),
 * the load overwrites that same register ("+a"), and the 4 delay slots
 * of the load are filled with NOPs before the result is used.
 */
static inline u32 get_unaligned32(const void *p)
{
	u32 val = (u32) p;
	asm (" ldnw	.d1t1	*%0,%0\n"
	     " nop     4\n"
	     : "+a"(val));
	return val;
}

/*
 * Store a 32-bit word in native endianness to a possibly unaligned
 * address using the C64x+ non-aligned store instruction (STNW).
 * Marked volatile with a "memory" clobber so the compiler neither
 * elides the store nor reorders it past other memory accesses.
 */
static inline void put_unaligned32(u32 val, void *p)
{
	asm volatile (" stnw	.d2t1	%0,*%1\n"
		      : : "a"(val), "b"(p) : "memory");
}

/*
 * Load a 64-bit doubleword in native endianness from a possibly
 * unaligned address using the C64x+ non-aligned doubleword load
 * (LDNDW), with NOPs covering the 4 load delay slots.
 */
static inline u64 get_unaligned64(const void *p)
{
	u64 val;
	asm volatile (" ldndw	.d1t1	*%1,%0\n"
		      " nop     4\n"
		      : "=a"(val) : "a"(p));
	return val;
}

/*
 * Store a 64-bit doubleword in native endianness to a possibly
 * unaligned address using the C64x+ non-aligned doubleword store
 * (STNDW).
 *
 * NOTE(review): the destination parameter is declared const void *
 * even though the asm writes through it (unlike put_unaligned32,
 * which takes plain void *) — looks like a historical inconsistency;
 * confirm before relying on the const qualifier.
 */
static inline void put_unaligned64(u64 val, const void *p)
{
	asm volatile (" stndw	.d2t1	%0,*%1\n"
		      : : "a"(val), "b"(p) : "memory");
}

/*
 * Map the endian-specific 32/64-bit accessors onto the native-endian
 * hardware helpers above: the accessor matching the CPU's endianness
 * is a direct load/store, the opposite one goes through a byte swap.
 * get_unaligned/put_unaligned default to the CPU's native byte order.
 */
#ifdef CONFIG_CPU_BIG_ENDIAN

#define get_unaligned_le32(p)	 __swab32(get_unaligned32(p))
#define get_unaligned_le64(p)	 __swab64(get_unaligned64(p))
#define get_unaligned_be32(p)	 get_unaligned32(p)
#define get_unaligned_be64(p)	 get_unaligned64(p)
#define put_unaligned_le32(v, p) put_unaligned32(__swab32(v), (p))
#define put_unaligned_le64(v, p) put_unaligned64(__swab64(v), (p))
#define put_unaligned_be32(v, p) put_unaligned32((v), (p))
#define put_unaligned_be64(v, p) put_unaligned64((v), (p))
#define get_unaligned	__get_unaligned_be
#define put_unaligned	__put_unaligned_be

#else

#define get_unaligned_le32(p)	 get_unaligned32(p)
#define get_unaligned_le64(p)	 get_unaligned64(p)
#define get_unaligned_be32(p)	 __swab32(get_unaligned32(p))
#define get_unaligned_be64(p)	 __swab64(get_unaligned64(p))
#define put_unaligned_le32(v, p) put_unaligned32((v), (p))
#define put_unaligned_le64(v, p) put_unaligned64((v), (p))
#define put_unaligned_be32(v, p) put_unaligned32(__swab32(v), (p))
#define put_unaligned_be64(v, p) put_unaligned64(__swab64(v), (p))
#define get_unaligned	__get_unaligned_le
#define put_unaligned	__put_unaligned_le

#endif

/*
 * Cause a link-time error if we try an unaligned access other than
 * 1,2,4 or 8 bytes long
 */
extern int __bad_unaligned_access_size(void);

/*
 * Size-dispatching little-endian load: picks the 1/2/4/8-byte accessor
 * from sizeof(*(ptr)) at compile time; any other size resolves to the
 * undefined __bad_unaligned_access_size() and fails at link time.
 *
 * NOTE(review): the result cast lacks the __force sparse annotation
 * that __get_unaligned_be carries — presumably an oversight; confirm
 * against sparse warnings on le-annotated pointer types.
 */
#define __get_unaligned_le(ptr) (typeof(*(ptr)))({			\
	sizeof(*(ptr)) == 1 ? *(ptr) :					\
	  (sizeof(*(ptr)) == 2 ? get_unaligned_le16((ptr)) :		\
	     (sizeof(*(ptr)) == 4 ? get_unaligned_le32((ptr)) :		\
		(sizeof(*(ptr)) == 8 ? get_unaligned_le64((ptr)) :	\
		   __bad_unaligned_access_size())));			\
	})

/*
 * Size-dispatching big-endian load: selects the 1/2/4/8-byte accessor
 * from sizeof(*(ptr)) at compile time; any other size resolves to the
 * undefined __bad_unaligned_access_size() and fails at link time.
 */
#define __get_unaligned_be(ptr) (__force typeof(*(ptr)))({	\
	sizeof(*(ptr)) == 1 ? *(ptr) :					\
	  (sizeof(*(ptr)) == 2 ? get_unaligned_be16((ptr)) :		\
	     (sizeof(*(ptr)) == 4 ? get_unaligned_be32((ptr)) :		\
		(sizeof(*(ptr)) == 8 ? get_unaligned_be64((ptr)) :	\
		   __bad_unaligned_access_size())));			\
	})

/*
 * Size-dispatching little-endian store: switches on sizeof(*(ptr)) at
 * compile time to the matching 1/2/4/8-byte helper; any other size
 * calls the undefined __bad_unaligned_access_size() and fails at link
 * time.  Evaluates to (void)0 so it cannot be misused as a value.
 */
#define __put_unaligned_le(val, ptr) ({					\
	void *__gu_p = (ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		*(u8 *)__gu_p = (__force u8)(val);			\
		break;							\
	case 2:								\
		put_unaligned_le16((__force u16)(val), __gu_p);		\
		break;							\
	case 4:								\
		put_unaligned_le32((__force u32)(val), __gu_p);		\
		break;							\
	case 8:								\
		put_unaligned_le64((__force u64)(val), __gu_p);		\
		break;							\
	default:							\
		__bad_unaligned_access_size();				\
		break;							\
	}								\
	(void)0; })

/*
 * Size-dispatching big-endian store: switches on sizeof(*(ptr)) at
 * compile time to the matching 1/2/4/8-byte helper; any other size
 * calls the undefined __bad_unaligned_access_size() and fails at link
 * time.  Evaluates to (void)0 so it cannot be misused as a value.
 */
#define __put_unaligned_be(val, ptr) ({					\
	void *__gu_p = (ptr);						\
	switch (sizeof(*(ptr))) {					\
	case 1:								\
		*(u8 *)__gu_p = (__force u8)(val);			\
		break;							\
	case 2:								\
		put_unaligned_be16((__force u16)(val), __gu_p);		\
		break;							\
	case 4:								\
		put_unaligned_be32((__force u32)(val), __gu_p);		\
		break;							\
	case 8:								\
		put_unaligned_be64((__force u64)(val), __gu_p);		\
		break;							\
	default:							\
		__bad_unaligned_access_size();				\
		break;							\
	}								\
	(void)0; })

#endif /* _ASM_C6X_UNALIGNED_H */