linux/arch/x86/lib/copy_user_nocache_64.S
/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 * Subject to the GNU Public License v2.
 *
 * Functions to copy from and to user space.
 */

#include <linux/linkage.h>
#include <asm/dwarf2.h>

#define FIX_ALIGNMENT 1

#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
#include <asm/asm.h>
#include <asm/smap.h>

        .macro ALIGN_DESTINATION
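        /*
         * Byte-copy until the destination (%rdi) is 8-byte aligned.
         * %ecx is set to the number of bytes needed to reach alignment
         * and is subtracted from the remaining count in %edx.  On a
         * fault, fixup 103 adds the not-yet-copied alignment bytes
         * back to %edx and jumps to copy_user_handle_tail.
         */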
#ifdef FIX_ALIGNMENT
        /* check for bad alignment of destination */
        movl %edi,%ecx
        andl $7,%ecx
        jz 102f                         /* already aligned */
        subl $8,%ecx
        negl %ecx
        subl %ecx,%edx
100:    movb (%rsi),%al
101:    movb %al,(%rdi)
        incq %rsi
        incq %rdi
        decl %ecx
        jnz 100b
102:
        .section .fixup,"ax"
103:    addl %ecx,%edx                  /* ecx is zerorest also */
        jmp copy_user_handle_tail
        .previous

        _ASM_EXTABLE(100b,103b)
        _ASM_EXTABLE(101b,103b)
#endif
        .endm

/*
 * copy_user_nocache - Uncached memory copy with exception handling
 * This will force destination out of cache for more performance
 * (the stores use non-temporal movnti instructions).
 */
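/*
 * Input:
 *  rdi destination
 *  rsi source
 *  rdx count
 *
 * Output:
 *  eax uncopied bytes or 0 if successful.
 */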
ENTRY(__copy_user_nocache)
        CFI_STARTPROC
        ASM_STAC
        cmpl $8,%edx
        jb 20f          /* less than 8 bytes, go to byte copy loop */
        ALIGN_DESTINATION
        movl %edx,%ecx
        andl $63,%edx
        shrl $6,%ecx
        jz 17f
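        /*
         * %ecx is the number of 64-byte blocks, %edx the bytes left
         * over after them.  Each block is copied with eight ordinary
         * loads and eight non-temporal (movnti) stores, so the
         * destination bypasses the cache.
         */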
1:      movq (%rsi),%r8
2:      movq 1*8(%rsi),%r9
3:      movq 2*8(%rsi),%r10
4:      movq 3*8(%rsi),%r11
5:      movnti %r8,(%rdi)
6:      movnti %r9,1*8(%rdi)
7:      movnti %r10,2*8(%rdi)
8:      movnti %r11,3*8(%rdi)
9:      movq 4*8(%rsi),%r8
10:     movq 5*8(%rsi),%r9
11:     movq 6*8(%rsi),%r10
12:     movq 7*8(%rsi),%r11
13:     movnti %r8,4*8(%rdi)
14:     movnti %r9,5*8(%rdi)
15:     movnti %r10,6*8(%rdi)
16:     movnti %r11,7*8(%rdi)
        leaq 64(%rsi),%rsi
        leaq 64(%rdi),%rdi
        decl %ecx
        jnz 1b
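        /*
         * Handle the remaining whole qwords: %ecx = qword count,
         * %edx = bytes left over after them.
         */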
17:     movl %edx,%ecx
        andl $7,%edx
        shrl $3,%ecx
        jz 20f
18:     movq (%rsi),%r8
19:     movnti %r8,(%rdi)
        leaq 8(%rsi),%rsi
        leaq 8(%rdi),%rdi
        decl %ecx
        jnz 18b
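        /* Copy any trailing bytes one at a time. */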
20:     andl %edx,%edx
        jz 23f
        movl %edx,%ecx
21:     movb (%rsi),%al
22:     movb %al,(%rdi)
        incq %rsi
        incq %rdi
        decl %ecx
        jnz 21b
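        /*
         * Success: return 0.  The sfence orders the weakly ordered
         * non-temporal stores before we return to the caller.
         */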
23:     xorl %eax,%eax
        ASM_CLAC
        sfence
        ret

        .section .fixup,"ax"
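        /*
         * Exception fixups: recompute the number of uncopied bytes in
         * %edx, then let copy_user_handle_tail retry byte by byte and
         * return the final uncopied count.
         *  30: fault in the 64-byte loop (%ecx counts unfinished blocks)
         *  40: fault in the qword loop   (%ecx counts unfinished qwords)
         *  50: fault in the byte loop    (%ecx counts unfinished bytes)
         */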
30:     shll $6,%ecx
        addl %ecx,%edx
        jmp 60f
40:     lea (%rdx,%rcx,8),%rdx
        jmp 60f
50:     movl %ecx,%edx
60:     sfence
        jmp copy_user_handle_tail
        .previous

        _ASM_EXTABLE(1b,30b)
        _ASM_EXTABLE(2b,30b)
        _ASM_EXTABLE(3b,30b)
        _ASM_EXTABLE(4b,30b)
        _ASM_EXTABLE(5b,30b)
        _ASM_EXTABLE(6b,30b)
        _ASM_EXTABLE(7b,30b)
        _ASM_EXTABLE(8b,30b)
        _ASM_EXTABLE(9b,30b)
        _ASM_EXTABLE(10b,30b)
        _ASM_EXTABLE(11b,30b)
        _ASM_EXTABLE(12b,30b)
        _ASM_EXTABLE(13b,30b)
        _ASM_EXTABLE(14b,30b)
        _ASM_EXTABLE(15b,30b)
        _ASM_EXTABLE(16b,30b)
        _ASM_EXTABLE(18b,40b)
        _ASM_EXTABLE(19b,40b)
        _ASM_EXTABLE(21b,50b)
        _ASM_EXTABLE(22b,50b)
        CFI_ENDPROC
ENDPROC(__copy_user_nocache)