# 1 "mem.S"

# 1 "/usr/include/linux/linkage.h" 1 3










# 2 "mem.S" 2








	
	.file "mem.S"

  


  
  

  
  
  
  

.text
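
/*
 * memcpy4to3 -- assumed cdecl prototype:
 *	void memcpy4to3(void *dest, void *src, int n);
 * Packs n 4-byte (32bpp) pixels from src into 3-byte (24bpp) pixels
 * at dest, dropping the unused high byte of each source pixel.
 */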
.align 4; .globl _memcpy4to3; _memcpy4to3: 
	pushl %ebp
	movl %esp,%ebp
	pushl %edi
	pushl %esi
	pushl %ebx
	pushl %ecx
	movl 8(%ebp),%edi	# dest
	movl 12(%ebp),%esi	# src
	movl 16(%ebp),%ecx	# pixel count

	 
1:	cmpl $8,%ecx		# at least 8 pixels left?
	jl 2f
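
	/*
	 * Per-iteration byte layout (little endian); p0..p7 are the source
	 * pixels, stored as bytes a,b,c,x where x is the unused high byte:
	 *	dest dword 0: a0 b0 c0 a1
	 *	dest dword 1: b1 c1 a2 b2
	 *	dest dword 2: c2 a3 b3 c3
	 * and likewise for p4..p7 into dest dwords 3..5.
	 */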

	movl (%esi),%eax	# p0
	movl 4(%esi),%ebx	# p1
	shll $8,%eax		# drop unused high byte of p0
	shrd $8,%ebx,%eax	# low 3 bytes of p0 + byte 0 of p1
	movl %eax,(%edi)
	shll $8,%ebx		# drop high byte of p1
	movl 8(%esi),%eax	# p2
	shrd $16,%eax,%ebx	# bytes 1-2 of p1 + bytes 0-1 of p2
	movl %ebx,4(%edi)
	shll $8,%eax		# drop high byte of p2
	movl 12(%esi),%ebx	# p3
	shrd $24,%ebx,%eax	# byte 2 of p2 + low 3 bytes of p3
	movl %eax,8(%edi)

	/* pixels 4..7: same repacking, pointer/count updates interleaved */
	movl 16(%esi),%eax	# p4
	shll $8,%eax		 
	movl 20(%esi),%ebx	 
	shrd $8,%ebx,%eax	 
	movl %eax,12(%edi)	 
	shll $8,%ebx		 
	movl 24(%esi),%eax	 
	shrd $16,%eax,%ebx	 
	movl %ebx,16(%edi)	 
	subl $8,%ecx		# 8 pixels done
	shll $8,%eax		 
	movl 28(%esi),%ebx	 
	shrd $24,%ebx,%eax	 
	addl $32,%esi		# src += 8*4
	movl %eax,20(%edi)	 

	addl $24,%edi		# dest += 8*3
	jmp 1b

2:	andl %ecx,%ecx		# pixels remaining?
	jz 4f			# none: done

	/* tail: one pixel per iteration (read 4 bytes, write 3) */

3:	movl (%esi),%eax
	movw %ax,(%edi)		# store bytes 0-1
	shrl $16,%eax
	movb %al,2(%edi)	# store byte 2
	addl $4,%esi
	addl $3,%edi
	decl %ecx
	jnz 3b

4:
	popl %ecx
	popl %ebx
	popl %esi
	popl %edi
	popl %ebp
	ret

  
  
  
  

.text
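
/*
 * memcpy32shift8 -- assumed cdecl prototype:
 *	void memcpy32shift8(void *dest, void *src, int n);
 * Copies n 32-bit words, shifting each word left by 8 bits
 * (e.g. 0x00123456 becomes 0x12345600).
 */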
.align 4; .globl _memcpy32shift8; _memcpy32shift8: 
	pushl %ebp
	movl %esp,%ebp
	pushl %edi
	pushl %esi
	pushl %ecx
	pushl %ebx
	movl 8(%ebp),%edi	# dest
	movl 12(%ebp),%esi	# src
	movl 16(%ebp),%ecx	# word count

	 
1:	cmpl $8,%ecx		# at least 8 words left?
	jl 2f

	/* bulk loop: load, shift left by 8, store; 8 words per iteration */
	movl (%esi),%eax
	shll $8,%eax
	movl %eax,(%edi)
	movl 4(%esi),%edx
	shll $8,%edx
	movl %edx,4(%edi)
	movl 8(%esi),%eax
	shll $8,%eax
	movl %eax,8(%edi)
	movl 12(%esi),%edx
	shll $8,%edx
	movl %edx,12(%edi)
	movl 16(%esi),%eax
	shll $8,%eax
	movl %eax,16(%edi)
	movl 20(%esi),%edx
	shll $8,%edx
	movl %edx,20(%edi)
	movl 24(%esi),%eax
	subl $8,%ecx		# 8 words done
	shll $8,%eax
	movl %eax,24(%edi)
	movl 28(%esi),%edx
	addl $32,%esi		# src += 32
	shll $8,%edx
	movl %edx,28(%edi)
	addl $32,%edi		# dest += 32
	jmp 1b

2:	andl %ecx,%ecx		# words remaining?
	jz 4f

3:	movl (%esi),%eax
	shll $8,%eax
	movl %eax,(%edi)
	addl $4,%esi
	addl $4,%edi
	decl %ecx
	jnz 3b

4:	
	popl %ebx
	popl %ecx
	popl %esi
	popl %edi
	popl %ebp
	ret


 
 
 
 
 
 

 
 
 
 







/*
 * __memcpy_jumptable: entry points that copy exactly 0..32 bytes,
 * indexed by byte count.
 */
.align 4; .globl __memcpy_jumptable; __memcpy_jumptable:
	.long copy0
	.long copy1, copy2, copy3, copy4
	.long copy5, copy6, copy7, copy8
	.long copy9, copy10, copy11, copy12
	.long copy13, copy14, copy15, copy16
	.long copy17, copy18, copy19, copy20
	.long copy21, copy22, copy23, copy24
	.long copy25, copy26, copy27, copy28
	.long copy29, copy30, copy31, copy32

/* dest-alignment dispatch, indexed by (dest & 3) */
jumptable2:
	.long align0, align1, align2, align3

/*
 * __memcpyasm_regargs: memcpy with arguments in registers:
 *	%ebx = dest, %edx = src, %ecx = byte count.
 * First aligns dest to a 4-byte boundary, then joins the aligned path.
 */
.align 4; .globl __memcpyasm_regargs; __memcpyasm_regargs:

	movl %ebx,%eax
	andl $3,%eax		# dest alignment (0..3)
	jmp *jumptable2(,%eax,4)
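
	/*
	 * alignN handles (dest & 3) == N by copying 4-N bytes first, so
	 * the bulk path below always stores to a 4-byte-aligned dest.
	 * Callers are assumed to supply at least 3 bytes.
	 */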

align1:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx)	# dest%4==1: copy 3 bytes
	movb  2 (%edx),%al; movb %al, 2 (%ebx) 
	addl $3,%edx
	addl $3,%ebx
	subl $3,%ecx
	jmp copyaligned

align3:	movb  0 (%edx),%al; movb %al, 0 (%ebx)	# dest%4==3: copy 1 byte
	incl %edx
	incl %ebx
	decl %ecx
	jmp copyaligned

align2:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx)	# dest%4==2: copy 2 bytes
	addl $2,%edx
	addl $2,%ebx
	subl $2,%ecx
align0:

copyaligned:
	cmpl $32,%ecx
	ja copyunrolled
	/* at most 32 bytes: one shot through the jump table */
	jmp *__memcpy_jumptable(,%ecx,4)
	.align 4,0x90

 
/*
 * __memcpyasm_regargs_aligned: entry for dest already 4-byte aligned;
 * the byte count is assumed to be at least 32.
 */
.align 4; .globl __memcpyasm_regargs_aligned; __memcpyasm_regargs_aligned:

copyunrolled:
	 
	 
	addl $32,%ebx			# advance dest up front; stores use offset-32
	movl (%edx),%eax
	movl %eax,(0-32)(%ebx)
	movl 4(%edx),%eax
	movl %eax,(4-32)(%ebx)		 
	movl 8(%edx),%eax
	movl %eax,(8-32)(%ebx)		 
	movl 12(%edx),%eax
	movl %eax,(12-32)(%ebx)		 
	movl 16(%edx),%eax
	addl $32,%edx			# advance src; remaining loads use offset-32
	movl %eax,(16-32)(%ebx)
	subl $32,%ecx			# 32 bytes this iteration
	movl (20-32)(%edx),%eax
	movl %eax,(20-32)(%ebx)		 
	movl (24-32)(%edx),%eax
	movl %eax,(24-32)(%ebx)		 
	movl (28-32)(%edx),%eax
	movl %eax,(28-32)(%ebx)		 
	cmpl $32,%ecx
	jge copyunrolled		# loop while >= 32 bytes remain
	/* tail: 0..31 bytes left, finish through the jump table */
	jmp *__memcpy_jumptable(,%ecx,4)
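
/* copyN: copy exactly N bytes (0 <= N <= 32) with unrolled moves */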



copy0:	ret 

copy1:	movb  0 (%edx),%al; movb %al, 0 (%ebx) 
	ret 

copy2:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	ret 

copy3:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	movb  2 (%edx),%al; movb %al, 2 (%ebx) 
	ret 

copy4:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	ret 

copy5:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movb  4 (%edx),%al; movb %al, 4 (%ebx) 
	ret 

copy6:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movw  4 (%edx),%ax; movw %ax, 4 (%ebx) 
	ret 

copy7:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movw  4 (%edx),%ax; movw %ax, 4 (%ebx) 
	movb  6 (%edx),%al; movb %al, 6 (%ebx) 
	ret 

copy8:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	ret 

copy9:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movb  8 (%edx),%al; movb %al, 8 (%ebx) 
	ret 

copy10:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movw  8 (%edx),%ax; movw %ax, 8 (%ebx) 
	ret 

copy11:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movw  8 (%edx),%ax; movw %ax, 8 (%ebx) 
	movb  10 (%edx),%al; movb %al, 10 (%ebx) 
	ret 

copy12:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	ret 

copy13:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movb  12 (%edx),%al; movb %al, 12 (%ebx) 
	ret 

copy14:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movw  12 (%edx),%ax; movw %ax, 12 (%ebx) 
	ret 

copy15:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movw  12 (%edx),%ax; movw %ax, 12 (%ebx) 
	movb  14 (%edx),%al; movb %al, 14 (%ebx) 
	ret 

copy16:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	ret 

copy17:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movb  16 (%edx),%al; movb %al, 16 (%ebx) 
	ret 

copy18:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movw  16 (%edx),%ax; movw %ax, 16 (%ebx) 
	ret 

copy19:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movw  16 (%edx),%ax; movw %ax, 16 (%ebx) 
	movb  18 (%edx),%al; movb %al, 18 (%ebx) 
	ret 

copy20:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	ret 

copy21:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movb  20 (%edx),%al; movb %al, 20 (%ebx) 
	ret 

copy22:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movw  20 (%edx),%ax; movw %ax, 20 (%ebx) 
	ret 

copy23:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movw  20 (%edx),%ax; movw %ax, 20 (%ebx) 
	movb  22 (%edx),%al; movb %al, 22 (%ebx) 
	ret 

copy24:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	ret 

copy25:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movb  24 (%edx),%al; movb %al, 24 (%ebx) 
	ret 

copy26:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movw  24 (%edx),%ax; movw %ax, 24 (%ebx) 
	ret 

copy27:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movw  24 (%edx),%ax; movw %ax, 24 (%ebx) 
	movb  26 (%edx),%al; movb %al, 26 (%ebx) 
	ret 

copy28:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	ret 

copy29:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movb  28 (%edx),%al; movb %al, 28 (%ebx) 
	ret 

copy30:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movw  28 (%edx),%ax; movw %ax, 28 (%ebx) 
	ret 

copy31:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movw  28 (%edx),%ax; movw %ax, 28 (%ebx) 
	movb  30 (%edx),%al; movb %al, 30 (%ebx) 
	ret 

copy32:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movl  28 (%edx),%eax; movl %eax, 28 (%ebx) 
	ret 
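
/*
 * Illustrative call sequence for the register-argument entry point
 * (%eax, %ebx, %ecx and %edx are clobbered):
 *
 *	movl $dest,%ebx		# destination address
 *	movl $src,%edx		# source address
 *	movl $count,%ecx	# byte count
 *	call __memcpyasm_regargs
 */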
# 1 "mem.S"

# 1 "/usr/include/linux/linkage.h" 1 3










# 2 "mem.S" 2








	
	.file "mem.S"

  


  
  

  
  
  
  

.text
.align 4; .globl _memcpy4to3; _memcpy4to3: 
	pushl %ebp
	movl %esp,%ebp
	pushl %edi
	pushl %esi
	pushl %ebx
	pushl %ecx
	movl 8(%ebp),%edi	 
	movl 12(%ebp),%esi	 
	movl 16(%ebp),%ecx	 

	 
1:	cmpl $8,%ecx
	jl 2f

	movl (%esi),%eax	 
	movl 4(%esi),%ebx	 
	shll $8,%eax		 
	shrd $8,%ebx,%eax	 
	movl %eax,(%edi)	 
	shll $8,%ebx		 
	movl 8(%esi),%eax	 
	shrd $16,%eax,%ebx	 
	movl %ebx,4(%edi)	 
	shll $8,%eax		 
	movl 12(%esi),%ebx	 
	shrd $24,%ebx,%eax	 
	movl %eax,8(%edi)	 

	movl 16(%esi),%eax	 
	shll $8,%eax		 
	movl 20(%esi),%ebx	 
	shrd $8,%ebx,%eax	 
	movl %eax,12(%edi)	 
	shll $8,%ebx		 
	movl 24(%esi),%eax	 
	shrd $16,%eax,%ebx	 
	movl %ebx,16(%edi)	 
	subl $8,%ecx		 
	shll $8,%eax		 
	movl 28(%esi),%ebx	 
	shrd $24,%ebx,%eax	 
	addl $32,%esi		 
	movl %eax,20(%edi)	 

	addl $24,%edi
	jmp 1b

2:	 

	andl %ecx,%ecx
	jz 4f			 

3:	movl (%esi),%eax
	movw %eax,(%edi)
	shrl $16,%eax
	movb %al,2(%edi)
	addl $4,%esi
	addl $3,%edi
	decl %ecx
	jnz 3b

4:
	popl %ecx
	popl %ebx
	popl %esi
	popl %edi
	popl %ebp
	ret

  
  
  
  

.text
.align 4; .globl _memcpy32shift8; _memcpy32shift8: 
	pushl %ebp
	movl %esp,%ebp
	pushl %edi
	pushl %esi
	pushl %ecx
	popl %ebx
	movl 8(%ebp),%edi	 
	movl 12(%ebp),%esi	 
	movl 16(%ebp),%ecx	 

	 
1:	cmpl $8,%ecx
	jl 2f

	movl (%esi),%eax
	shll $8,%eax
	movl %eax,(%edi)
	movl 4(%esi),%edx
	shll $8,%edx
	movl %edx,4(%edi)
	movl 8(%esi),%eax
	shll $8,%eax
	movl %eax,8(%edi)
	movl 12(%esi),%edx
	shll $8,%edx
	movl %edx,12(%edi)
	movl 16(%esi),%eax
	shll $8,%eax
	movl %eax,16(%edi)
	movl 20(%esi),%edx
	shll $8,%edx
	movl %edx,20(%edi)
	movl 24(%esi),%eax
	subl $8,%ecx
	shll $8,%eax
	movl %eax,24(%edi)
	movl 28(%esi),%edx
	addl $32,%esi
	shll $8,%edx
	movl %edx,28(%edi)
	addl $32,%edi
	jmp 1b

2:	andl %ecx,%ecx
	jz 4f

3:	movl (%esi),%eax
	shll $8,%eax
	movl %eax,(%edi)
	addl $4,%esi
	addl $4,%edi
	decl %ecx
	jnz 3b

4:	
	popl %ebx
	popl %ecx
	popl %esi
	popl %edi
	popl %ebp
	ret


 
 
 
 
 
 

 
 
 
 







.align 4; .globl __memcpy_jumptable; __memcpy_jumptable: 
	.long copy0
	.long copy1, copy2, copy3, copy4
	.long copy5, copy6, copy7, copy8
	.long copy9, copy10, copy11, copy12
	.long copy13, copy14, copy15, copy16
	.long copy17, copy18, copy19, copy20
	.long copy21, copy22, copy23, copy24
	.long copy25, copy26, copy27, copy28
	.long copy29, copy30, copy31, copy32

jumptable2:
	.long align0, align1, align2, align3

.align 4; .globl __memcpyasm_regargs; __memcpyasm_regargs: 

	 

	 
	movl %ebx,%eax
	andl $3,%eax
	jmp *jumptable2(,%eax,4)

align1:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	movb  2 (%edx),%al; movb %al, 2 (%ebx) 
	addl $3,%edx
	addl $3,%ebx
	subl $3,%ecx
	jmp copyaligned

align3:	movb  0 (%edx),%al; movb %al, 0 (%ebx) 
	incl %edx
	incl %ebx
	decl %ecx
	jmp copyaligned

align2:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	addl $2,%edx
	addl $2,%ebx
	subl $2,%ecx
align0:

copyaligned:
	cmpl $32,%ecx
	ja copyunrolled
	 
	 
	jmp * _memcpy_jumptable  (,%ecx,4)
	.align 4,0x90

 
.align 4; .globl __memcpyasm_regargs_aligned; __memcpyasm_regargs_aligned: 

copyunrolled:	
	 
	 
	addl $32,%ebx			 
	movl (%edx),%eax
	movl %eax,(0-32)(%ebx)		 
	movl 4(%edx),%eax
	movl %eax,(4-32)(%ebx)		 
	movl 8(%edx),%eax
	movl %eax,(8-32)(%ebx)		 
	movl 12(%edx),%eax
	movl %eax,(12-32)(%ebx)		 
	movl 16(%edx),%eax
	addl $32,%edx			 
	movl %eax,(16-32)(%ebx)
	subl $32,%ecx			 
	movl (20-32)(%edx),%eax
	movl %eax,(20-32)(%ebx)		 
	movl (24-32)(%edx),%eax
	movl %eax,(24-32)(%ebx)		 
	movl (28-32)(%edx),%eax
	movl %eax,(28-32)(%ebx)		 
	cmpl $32,%ecx
	jge copyunrolled		 
	 
	jmp * _memcpy_jumptable  (,%ecx,4)



copy0:	ret 

copy1:	movb  0 (%edx),%al; movb %al, 0 (%ebx) 
	ret 

copy2:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	ret 

copy3:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	movb  2 (%edx),%al; movb %al, 2 (%ebx) 
	ret 

copy4:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	ret 

copy5:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movb  4 (%edx),%al; movb %al, 4 (%ebx) 
	ret 

copy6:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movw  4 (%edx),%ax; movw %ax, 4 (%ebx) 
	ret 

copy7:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movw  4 (%edx),%ax; movw %ax, 4 (%ebx) 
	movb  6 (%edx),%al; movb %al, 6 (%ebx) 
	ret 

copy8:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	ret 

copy9:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movb  8 (%edx),%al; movb %al, 8 (%ebx) 
	ret 

copy10:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movw  8 (%edx),%ax; movw %ax, 8 (%ebx) 
	ret 

copy11:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movw  8 (%edx),%ax; movw %ax, 8 (%ebx) 
	movb  10 (%edx),%al; movb %al, 10 (%ebx) 
	ret 

copy12:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	ret 

copy13:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movb  12 (%edx),%al; movb %al, 12 (%ebx) 
	ret 

copy14:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movw  12 (%edx),%ax; movw %ax, 12 (%ebx) 
	ret 

copy15:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movw  12 (%edx),%ax; movw %ax, 12 (%ebx) 
	movb  14 (%edx),%al; movb %al, 14 (%ebx) 
	ret 

copy16:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	ret 

copy17:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movb  16 (%edx),%al; movb %al, 16 (%ebx) 
	ret 

copy18:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movw  16 (%edx),%ax; movw %ax, 16 (%ebx) 
	ret 

copy19:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movw  16 (%edx),%ax; movw %ax, 16 (%ebx) 
	movb  18 (%edx),%al; movb %al, 18 (%ebx) 
	ret 

copy20:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	ret 

copy21:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movb  20 (%edx),%al; movb %al, 20 (%ebx) 
	ret 

copy22:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movw  20 (%edx),%ax; movw %ax, 20 (%ebx) 
	ret 

copy23:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movw  20 (%edx),%ax; movw %ax, 20 (%ebx) 
	movb  22 (%edx),%al; movb %al, 22 (%ebx) 
	ret 

copy24:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	ret 

copy25:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movb  24 (%edx),%al; movb %al, 24 (%ebx) 
	ret 

copy26:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movw  24 (%edx),%ax; movw %ax, 24 (%ebx) 
	ret 

copy27:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movw  24 (%edx),%ax; movw %ax, 24 (%ebx) 
	movb  26 (%edx),%al; movb %al, 26 (%ebx) 
	ret 

copy28:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	ret 

copy29:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movb  28 (%edx),%al; movb %al, 28 (%ebx) 
	ret 

copy30:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movw  28 (%edx),%ax; movw %ax, 28 (%ebx) 
	ret 

copy31:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movw  28 (%edx),%ax; movw %ax, 28 (%ebx) 
	movb  30 (%edx),%al; movb %al, 30 (%ebx) 
	ret 

copy32:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movl  28 (%edx),%eax; movl %eax, 28 (%ebx) 
	ret 
# 1 "mem.S"

# 1 "/usr/include/linux/linkage.h" 1 3










# 2 "mem.S" 2








	
	.file "mem.S"

  


  
  

  
  
  
  

.text
.align 4; .globl _memcpy4to3; _memcpy4to3: 
	pushl %ebp
	movl %esp,%ebp
	pushl %edi
	pushl %esi
	pushl %ebx
	pushl %ecx
	movl 8(%ebp),%edi	 
	movl 12(%ebp),%esi	 
	movl 16(%ebp),%ecx	 

	 
1:	cmpl $8,%ecx
	jl 2f

	movl (%esi),%eax	 
	movl 4(%esi),%ebx	 
	shll $8,%eax		 
	shrd $8,%ebx,%eax	 
	movl %eax,(%edi)	 
	shll $8,%ebx		 
	movl 8(%esi),%eax	 
	shrd $16,%eax,%ebx	 
	movl %ebx,4(%edi)	 
	shll $8,%eax		 
	movl 12(%esi),%ebx	 
	shrd $24,%ebx,%eax	 
	movl %eax,8(%edi)	 

	movl 16(%esi),%eax	 
	shll $8,%eax		 
	movl 20(%esi),%ebx	 
	shrd $8,%ebx,%eax	 
	movl %eax,12(%edi)	 
	shll $8,%ebx		 
	movl 24(%esi),%eax	 
	shrd $16,%eax,%ebx	 
	movl %ebx,16(%edi)	 
	subl $8,%ecx		 
	shll $8,%eax		 
	movl 28(%esi),%ebx	 
	shrd $24,%ebx,%eax	 
	addl $32,%esi		 
	movl %eax,20(%edi)	 

	addl $24,%edi
	jmp 1b

2:	 

	andl %ecx,%ecx
	jz 4f			 

3:	movl (%esi),%eax
	movw %eax,(%edi)
	shrl $16,%eax
	movb %al,2(%edi)
	addl $4,%esi
	addl $3,%edi
	decl %ecx
	jnz 3b

4:
	popl %ecx
	popl %ebx
	popl %esi
	popl %edi
	popl %ebp
	ret

  
  
  
  

.text
.align 4; .globl _memcpy32shift8; _memcpy32shift8: 
	pushl %ebp
	movl %esp,%ebp
	pushl %edi
	pushl %esi
	pushl %ecx
	popl %ebx
	movl 8(%ebp),%edi	 
	movl 12(%ebp),%esi	 
	movl 16(%ebp),%ecx	 

	 
1:	cmpl $8,%ecx
	jl 2f

	movl (%esi),%eax
	shll $8,%eax
	movl %eax,(%edi)
	movl 4(%esi),%edx
	shll $8,%edx
	movl %edx,4(%edi)
	movl 8(%esi),%eax
	shll $8,%eax
	movl %eax,8(%edi)
	movl 12(%esi),%edx
	shll $8,%edx
	movl %edx,12(%edi)
	movl 16(%esi),%eax
	shll $8,%eax
	movl %eax,16(%edi)
	movl 20(%esi),%edx
	shll $8,%edx
	movl %edx,20(%edi)
	movl 24(%esi),%eax
	subl $8,%ecx
	shll $8,%eax
	movl %eax,24(%edi)
	movl 28(%esi),%edx
	addl $32,%esi
	shll $8,%edx
	movl %edx,28(%edi)
	addl $32,%edi
	jmp 1b

2:	andl %ecx,%ecx
	jz 4f

3:	movl (%esi),%eax
	shll $8,%eax
	movl %eax,(%edi)
	addl $4,%esi
	addl $4,%edi
	decl %ecx
	jnz 3b

4:	
	popl %ebx
	popl %ecx
	popl %esi
	popl %edi
	popl %ebp
	ret


 
 
 
 
 
 

 
 
 
 







.align 4; .globl __memcpy_jumptable; __memcpy_jumptable: 
	.long copy0
	.long copy1, copy2, copy3, copy4
	.long copy5, copy6, copy7, copy8
	.long copy9, copy10, copy11, copy12
	.long copy13, copy14, copy15, copy16
	.long copy17, copy18, copy19, copy20
	.long copy21, copy22, copy23, copy24
	.long copy25, copy26, copy27, copy28
	.long copy29, copy30, copy31, copy32

jumptable2:
	.long align0, align1, align2, align3

.align 4; .globl __memcpyasm_regargs; __memcpyasm_regargs: 

	 

	 
	movl %ebx,%eax
	andl $3,%eax
	jmp *jumptable2(,%eax,4)

align1:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	movb  2 (%edx),%al; movb %al, 2 (%ebx) 
	addl $3,%edx
	addl $3,%ebx
	subl $3,%ecx
	jmp copyaligned

align3:	movb  0 (%edx),%al; movb %al, 0 (%ebx) 
	incl %edx
	incl %ebx
	decl %ecx
	jmp copyaligned

align2:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	addl $2,%edx
	addl $2,%ebx
	subl $2,%ecx
align0:

copyaligned:
	cmpl $32,%ecx
	ja copyunrolled
	 
	 
	jmp * _memcpy_jumptable  (,%ecx,4)
	.align 4,0x90

 
.align 4; .globl __memcpyasm_regargs_aligned; __memcpyasm_regargs_aligned: 

copyunrolled:	
	 
	 
	addl $32,%ebx			 
	movl (%edx),%eax
	movl %eax,(0-32)(%ebx)		 
	movl 4(%edx),%eax
	movl %eax,(4-32)(%ebx)		 
	movl 8(%edx),%eax
	movl %eax,(8-32)(%ebx)		 
	movl 12(%edx),%eax
	movl %eax,(12-32)(%ebx)		 
	movl 16(%edx),%eax
	addl $32,%edx			 
	movl %eax,(16-32)(%ebx)
	subl $32,%ecx			 
	movl (20-32)(%edx),%eax
	movl %eax,(20-32)(%ebx)		 
	movl (24-32)(%edx),%eax
	movl %eax,(24-32)(%ebx)		 
	movl (28-32)(%edx),%eax
	movl %eax,(28-32)(%ebx)		 
	cmpl $32,%ecx
	jge copyunrolled		 
	 
	jmp * _memcpy_jumptable  (,%ecx,4)



copy0:	ret 

copy1:	movb  0 (%edx),%al; movb %al, 0 (%ebx) 
	ret 

copy2:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	ret 

copy3:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	movb  2 (%edx),%al; movb %al, 2 (%ebx) 
	ret 

copy4:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	ret 

copy5:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movb  4 (%edx),%al; movb %al, 4 (%ebx) 
	ret 

copy6:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movw  4 (%edx),%ax; movw %ax, 4 (%ebx) 
	ret 

copy7:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movw  4 (%edx),%ax; movw %ax, 4 (%ebx) 
	movb  6 (%edx),%al; movb %al, 6 (%ebx) 
	ret 

copy8:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	ret 

copy9:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movb  8 (%edx),%al; movb %al, 8 (%ebx) 
	ret 

copy10:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movw  8 (%edx),%ax; movw %ax, 8 (%ebx) 
	ret 

copy11:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movw  8 (%edx),%ax; movw %ax, 8 (%ebx) 
	movb  10 (%edx),%al; movb %al, 10 (%ebx) 
	ret 

copy12:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	ret 

copy13:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movb  12 (%edx),%al; movb %al, 12 (%ebx) 
	ret 

copy14:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movw  12 (%edx),%ax; movw %ax, 12 (%ebx) 
	ret 

copy15:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movw  12 (%edx),%ax; movw %ax, 12 (%ebx) 
	movb  14 (%edx),%al; movb %al, 14 (%ebx) 
	ret 

copy16:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	ret 

copy17:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movb  16 (%edx),%al; movb %al, 16 (%ebx) 
	ret 

copy18:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movw  16 (%edx),%ax; movw %ax, 16 (%ebx) 
	ret 

copy19:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movw  16 (%edx),%ax; movw %ax, 16 (%ebx) 
	movb  18 (%edx),%al; movb %al, 18 (%ebx) 
	ret 

copy20:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	ret 

copy21:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movb  20 (%edx),%al; movb %al, 20 (%ebx) 
	ret 

copy22:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movw  20 (%edx),%ax; movw %ax, 20 (%ebx) 
	ret 

copy23:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movw  20 (%edx),%ax; movw %ax, 20 (%ebx) 
	movb  22 (%edx),%al; movb %al, 22 (%ebx) 
	ret 

copy24:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	ret 

copy25:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movb  24 (%edx),%al; movb %al, 24 (%ebx) 
	ret 

copy26:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movw  24 (%edx),%ax; movw %ax, 24 (%ebx) 
	ret 

copy27:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movw  24 (%edx),%ax; movw %ax, 24 (%ebx) 
	movb  26 (%edx),%al; movb %al, 26 (%ebx) 
	ret 

copy28:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	ret 

copy29:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movb  28 (%edx),%al; movb %al, 28 (%ebx) 
	ret 

copy30:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movw  28 (%edx),%ax; movw %ax, 28 (%ebx) 
	ret 

copy31:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movw  28 (%edx),%ax; movw %ax, 28 (%ebx) 
	movb  30 (%edx),%al; movb %al, 30 (%ebx) 
	ret 

copy32:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movl  28 (%edx),%eax; movl %eax, 28 (%ebx) 
	ret 
# 1 "mem.S"

# 1 "/usr/include/linux/linkage.h" 1 3










# 2 "mem.S" 2








	
	.file "mem.S"

  


  
  

  
  
  
  

.text
.align 4; .globl _memcpy4to3; _memcpy4to3: 
	pushl %ebp
	movl %esp,%ebp
	pushl %edi
	pushl %esi
	pushl %ebx
	pushl %ecx
	movl 8(%ebp),%edi	 
	movl 12(%ebp),%esi	 
	movl 16(%ebp),%ecx	 

	 
1:	cmpl $8,%ecx
	jl 2f

	movl (%esi),%eax	 
	movl 4(%esi),%ebx	 
	shll $8,%eax		 
	shrd $8,%ebx,%eax	 
	movl %eax,(%edi)	 
	shll $8,%ebx		 
	movl 8(%esi),%eax	 
	shrd $16,%eax,%ebx	 
	movl %ebx,4(%edi)	 
	shll $8,%eax		 
	movl 12(%esi),%ebx	 
	shrd $24,%ebx,%eax	 
	movl %eax,8(%edi)	 

	movl 16(%esi),%eax	 
	shll $8,%eax		 
	movl 20(%esi),%ebx	 
	shrd $8,%ebx,%eax	 
	movl %eax,12(%edi)	 
	shll $8,%ebx		 
	movl 24(%esi),%eax	 
	shrd $16,%eax,%ebx	 
	movl %ebx,16(%edi)	 
	subl $8,%ecx		 
	shll $8,%eax		 
	movl 28(%esi),%ebx	 
	shrd $24,%ebx,%eax	 
	addl $32,%esi		 
	movl %eax,20(%edi)	 

	addl $24,%edi
	jmp 1b

2:	 

	andl %ecx,%ecx
	jz 4f			 

3:	movl (%esi),%eax
	movw %eax,(%edi)
	shrl $16,%eax
	movb %al,2(%edi)
	addl $4,%esi
	addl $3,%edi
	decl %ecx
	jnz 3b

4:
	popl %ecx
	popl %ebx
	popl %esi
	popl %edi
	popl %ebp
	ret

  
  
  
  

.text
.align 4; .globl _memcpy32shift8; _memcpy32shift8: 
	pushl %ebp
	movl %esp,%ebp
	pushl %edi
	pushl %esi
	pushl %ecx
	popl %ebx
	movl 8(%ebp),%edi	 
	movl 12(%ebp),%esi	 
	movl 16(%ebp),%ecx	 

	 
1:	cmpl $8,%ecx
	jl 2f

	movl (%esi),%eax
	shll $8,%eax
	movl %eax,(%edi)
	movl 4(%esi),%edx
	shll $8,%edx
	movl %edx,4(%edi)
	movl 8(%esi),%eax
	shll $8,%eax
	movl %eax,8(%edi)
	movl 12(%esi),%edx
	shll $8,%edx
	movl %edx,12(%edi)
	movl 16(%esi),%eax
	shll $8,%eax
	movl %eax,16(%edi)
	movl 20(%esi),%edx
	shll $8,%edx
	movl %edx,20(%edi)
	movl 24(%esi),%eax
	subl $8,%ecx
	shll $8,%eax
	movl %eax,24(%edi)
	movl 28(%esi),%edx
	addl $32,%esi
	shll $8,%edx
	movl %edx,28(%edi)
	addl $32,%edi
	jmp 1b

2:	andl %ecx,%ecx
	jz 4f

3:	movl (%esi),%eax
	shll $8,%eax
	movl %eax,(%edi)
	addl $4,%esi
	addl $4,%edi
	decl %ecx
	jnz 3b

4:	
	popl %ebx
	popl %ecx
	popl %esi
	popl %edi
	popl %ebp
	ret


 
 
 
 
 
 

 
 
 
 







.align 4; .globl __memcpy_jumptable; __memcpy_jumptable: 
	.long copy0
	.long copy1, copy2, copy3, copy4
	.long copy5, copy6, copy7, copy8
	.long copy9, copy10, copy11, copy12
	.long copy13, copy14, copy15, copy16
	.long copy17, copy18, copy19, copy20
	.long copy21, copy22, copy23, copy24
	.long copy25, copy26, copy27, copy28
	.long copy29, copy30, copy31, copy32

jumptable2:
	.long align0, align1, align2, align3

.align 4; .globl __memcpyasm_regargs; __memcpyasm_regargs: 

	 

	 
	movl %ebx,%eax
	andl $3,%eax
	jmp *jumptable2(,%eax,4)

align1:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	movb  2 (%edx),%al; movb %al, 2 (%ebx) 
	addl $3,%edx
	addl $3,%ebx
	subl $3,%ecx
	jmp copyaligned

align3:	movb  0 (%edx),%al; movb %al, 0 (%ebx) 
	incl %edx
	incl %ebx
	decl %ecx
	jmp copyaligned

align2:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	addl $2,%edx
	addl $2,%ebx
	subl $2,%ecx
align0:

copyaligned:
	cmpl $32,%ecx
	ja copyunrolled
	 
	 
	jmp * _memcpy_jumptable  (,%ecx,4)
	.align 4,0x90

 
.align 4; .globl __memcpyasm_regargs_aligned; __memcpyasm_regargs_aligned: 

copyunrolled:	
	 
	 
	addl $32,%ebx			 
	movl (%edx),%eax
	movl %eax,(0-32)(%ebx)		 
	movl 4(%edx),%eax
	movl %eax,(4-32)(%ebx)		 
	movl 8(%edx),%eax
	movl %eax,(8-32)(%ebx)		 
	movl 12(%edx),%eax
	movl %eax,(12-32)(%ebx)		 
	movl 16(%edx),%eax
	addl $32,%edx			 
	movl %eax,(16-32)(%ebx)
	subl $32,%ecx			 
	movl (20-32)(%edx),%eax
	movl %eax,(20-32)(%ebx)		 
	movl (24-32)(%edx),%eax
	movl %eax,(24-32)(%ebx)		 
	movl (28-32)(%edx),%eax
	movl %eax,(28-32)(%ebx)		 
	cmpl $32,%ecx
	jge copyunrolled		 
	 
	jmp * _memcpy_jumptable  (,%ecx,4)



copy0:	ret 

copy1:	movb  0 (%edx),%al; movb %al, 0 (%ebx) 
	ret 

copy2:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	ret 

copy3:	movw  0 (%edx),%ax; movw %ax, 0 (%ebx) 
	movb  2 (%edx),%al; movb %al, 2 (%ebx) 
	ret 

copy4:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	ret 

copy5:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movb  4 (%edx),%al; movb %al, 4 (%ebx) 
	ret 

copy6:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movw  4 (%edx),%ax; movw %ax, 4 (%ebx) 
	ret 

copy7:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movw  4 (%edx),%ax; movw %ax, 4 (%ebx) 
	movb  6 (%edx),%al; movb %al, 6 (%ebx) 
	ret 

copy8:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	ret 

copy9:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movb  8 (%edx),%al; movb %al, 8 (%ebx) 
	ret 

copy10:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movw  8 (%edx),%ax; movw %ax, 8 (%ebx) 
	ret 

copy11:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movw  8 (%edx),%ax; movw %ax, 8 (%ebx) 
	movb  10 (%edx),%al; movb %al, 10 (%ebx) 
	ret 

copy12:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	ret 

copy13:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movb  12 (%edx),%al; movb %al, 12 (%ebx) 
	ret 

copy14:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movw  12 (%edx),%ax; movw %ax, 12 (%ebx) 
	ret 

copy15:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movw  12 (%edx),%ax; movw %ax, 12 (%ebx) 
	movb  14 (%edx),%al; movb %al, 14 (%ebx) 
	ret 

copy16:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	ret 

copy17:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movb  16 (%edx),%al; movb %al, 16 (%ebx) 
	ret 

copy18:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movw  16 (%edx),%ax; movw %ax, 16 (%ebx) 
	ret 

copy19:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movw  16 (%edx),%ax; movw %ax, 16 (%ebx) 
	movb  18 (%edx),%al; movb %al, 18 (%ebx) 
	ret 

copy20:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	ret 

copy21:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movb  20 (%edx),%al; movb %al, 20 (%ebx) 
	ret 

copy22:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movw  20 (%edx),%ax; movw %ax, 20 (%ebx) 
	ret 

copy23:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movw  20 (%edx),%ax; movw %ax, 20 (%ebx) 
	movb  22 (%edx),%al; movb %al, 22 (%ebx) 
	ret 

copy24:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	ret 

copy25:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movb  24 (%edx),%al; movb %al, 24 (%ebx) 
	ret 

copy26:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movw  24 (%edx),%ax; movw %ax, 24 (%ebx) 
	ret 

copy27:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movw  24 (%edx),%ax; movw %ax, 24 (%ebx) 
	movb  26 (%edx),%al; movb %al, 26 (%ebx) 
	ret 

copy28:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	ret 

copy29:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movb  28 (%edx),%al; movb %al, 28 (%ebx) 
	ret 

copy30:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movw  28 (%edx),%ax; movw %ax, 28 (%ebx) 
	ret 

copy31:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movw  28 (%edx),%ax; movw %ax, 28 (%ebx) 
	movb  30 (%edx),%al; movb %al, 30 (%ebx) 
	ret 

copy32:	movl  0 (%edx),%eax; movl %eax, 0 (%ebx) 
	movl  4 (%edx),%eax; movl %eax, 4 (%ebx) 
	movl  8 (%edx),%eax; movl %eax, 8 (%ebx) 
	movl  12 (%edx),%eax; movl %eax, 12 (%ebx) 
	movl  16 (%edx),%eax; movl %eax, 16 (%ebx) 
	movl  20 (%edx),%eax; movl %eax, 20 (%ebx) 
	movl  24 (%edx),%eax; movl %eax, 24 (%ebx) 
	movl  28 (%edx),%eax; movl %eax, 28 (%ebx) 
	ret 
