
/arch/x86/lib/copy_user_nocache_64.S

https://bitbucket.org/cresqo/cm7-p500-kernel
/*
 * Copyright 2008 Vitaly Mayatskikh <vmayatsk@redhat.com>
 * Copyright 2002 Andi Kleen, SuSE Labs.
 * Subject to the GNU Public License v2.
 *
 * Functions to copy from and to user space.
 */

#include <linux/linkage.h>
#include <asm/dwarf2.h>

#define FIX_ALIGNMENT 1

#include <asm/current.h>
#include <asm/asm-offsets.h>
#include <asm/thread_info.h>
	.macro ALIGN_DESTINATION
#ifdef FIX_ALIGNMENT
	/* check for bad alignment of destination */
	movl %edi,%ecx
	andl $7,%ecx
	jz 102f				/* already aligned */
	subl $8,%ecx
	negl %ecx
	subl %ecx,%edx
100:	movb (%rsi),%al
101:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 100b
102:
	.section .fixup,"ax"
103:	addl %ecx,%edx			/* ecx is zerorest also */
	jmp copy_user_handle_tail
	.previous

	.section __ex_table,"a"
	.align 8
	.quad 100b,103b
	.quad 101b,103b
	.previous
#endif
	.endm

/*
 * copy_user_nocache - Uncached memory copy with exception handling
 * This will force destination/source out of cache for more performance.
 */
ENTRY(__copy_user_nocache)
	CFI_STARTPROC
	cmpl $8,%edx
	jb 20f		/* less than 8 bytes, go to byte copy loop */
	ALIGN_DESTINATION
	movl %edx,%ecx
	andl $63,%edx
	shrl $6,%ecx
	jz 17f
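	/* main loop: 64 bytes per iteration; movnti stores bypass the cache */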
1:	movq (%rsi),%r8
2:	movq 1*8(%rsi),%r9
3:	movq 2*8(%rsi),%r10
4:	movq 3*8(%rsi),%r11
5:	movnti %r8,(%rdi)
6:	movnti %r9,1*8(%rdi)
7:	movnti %r10,2*8(%rdi)
8:	movnti %r11,3*8(%rdi)
9:	movq 4*8(%rsi),%r8
10:	movq 5*8(%rsi),%r9
11:	movq 6*8(%rsi),%r10
12:	movq 7*8(%rsi),%r11
13:	movnti %r8,4*8(%rdi)
14:	movnti %r9,5*8(%rdi)
15:	movnti %r10,6*8(%rdi)
16:	movnti %r11,7*8(%rdi)
	leaq 64(%rsi),%rsi
	leaq 64(%rdi),%rdi
	decl %ecx
	jnz 1b
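	/* copy any remaining whole qwords */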
17:	movl %edx,%ecx
	andl $7,%edx
	shrl $3,%ecx
	jz 20f
18:	movq (%rsi),%r8
19:	movnti %r8,(%rdi)
	leaq 8(%rsi),%rsi
	leaq 8(%rdi),%rdi
	decl %ecx
	jnz 18b
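	/* copy trailing bytes one at a time */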
20:	andl %edx,%edx
	jz 23f
	movl %edx,%ecx
21:	movb (%rsi),%al
22:	movb %al,(%rdi)
	incq %rsi
	incq %rdi
	decl %ecx
	jnz 21b
23:	xorl %eax,%eax
	sfence
	ret

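	/*
	 * Fault fixups: fold the relevant loop counter back into %edx
	 * so it holds the number of bytes not yet copied, drain the
	 * pending non-temporal stores with sfence, then hand off to
	 * copy_user_handle_tail.
	 */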
	.section .fixup,"ax"
30:	shll $6,%ecx
	addl %ecx,%edx
	jmp 60f
40:	lea (%rdx,%rcx,8),%rdx
	jmp 60f
50:	movl %ecx,%edx
60:	sfence
	jmp copy_user_handle_tail
	.previous

	.section __ex_table,"a"
	.quad 1b,30b
	.quad 2b,30b
	.quad 3b,30b
	.quad 4b,30b
	.quad 5b,30b
	.quad 6b,30b
	.quad 7b,30b
	.quad 8b,30b
	.quad 9b,30b
	.quad 10b,30b
	.quad 11b,30b
	.quad 12b,30b
	.quad 13b,30b
	.quad 14b,30b
	.quad 15b,30b
	.quad 16b,30b
	.quad 18b,40b
	.quad 19b,40b
	.quad 21b,50b
	.quad 22b,50b
	.previous
	CFI_ENDPROC
ENDPROC(__copy_user_nocache)
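
For readers more comfortable in C, the sketch below mirrors the structure of the routine above: align the destination to 8 bytes, run a 64-byte unrolled body of non-temporal qword stores, then handle the qword and byte tails, finishing with sfence. It is a minimal userspace analogue for illustration only; copy_nocache is a hypothetical name, and none of the kernel's exception handling (the .fixup/__ex_table machinery) is reproduced. The _mm_stream_si64 intrinsic compiles to the same movnti instruction used in the assembly.

/*
 * Userspace sketch of a non-temporal copy, modeled on
 * __copy_user_nocache. Hypothetical; no fault handling.
 */
#include <stdint.h>
#include <stddef.h>
#include <string.h>
#include <emmintrin.h>	/* _mm_stream_si64, _mm_sfence (x86-64, SSE2) */

static void copy_nocache(void *dst, const void *src, size_t len)
{
	char *d = dst;
	const char *s = src;

	/* align destination to 8 bytes, as ALIGN_DESTINATION does */
	while (((uintptr_t)d & 7) && len) {
		*d++ = *s++;
		len--;
	}

	/* main loop: 64 bytes per iteration, stores bypass the cache */
	while (len >= 64) {
		for (int i = 0; i < 8; i++) {
			long long q;
			memcpy(&q, s + i * 8, 8);	/* unaligned-safe load */
			_mm_stream_si64((long long *)(d + i * 8), q);
		}
		d += 64;
		s += 64;
		len -= 64;
	}

	/* remaining whole qwords */
	while (len >= 8) {
		long long q;
		memcpy(&q, s, 8);
		_mm_stream_si64((long long *)d, q);
		d += 8;
		s += 8;
		len -= 8;
	}

	/* trailing bytes */
	while (len--)
		*d++ = *s++;

	/* make the non-temporal stores globally visible, as the
	 * assembly does with sfence before ret */
	_mm_sfence();
}

The sfence matters in both versions: non-temporal stores are weakly ordered, so without it a later reader could observe stale data.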