Mon Jun 6 06:08:53 2011 UTC ()
Regenerating the assembler files created these new files
(the changed files were committed previously)


(spz)
diff -r0 -r1.1 src/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/aesni-x86.S
diff -r0 -r1.1 src/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/cmll-x86.S
diff -r0 -r1.1 src/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/uplink-x86.S
diff -r0 -r1.1 src/crypto/external/bsd/openssl/lib/libcrypto/arch/sparc64/sha1-sparcv9a.S
diff -r0 -r1.1 src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/cmll-x86_64.S
diff -r0 -r1.1 src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/uplink-x86_64.S

File Added: src/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/aesni-x86.S
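/*
 * The routines in this file implement OpenSSL's AES-NI entry points.
 * A sketch of the C prototypes they correspond to, inferred from the
 * cdecl stack loads below (AES_KEY is OpenSSL's key-schedule struct;
 * this code keeps the round count at offset 240 of it):
 *
 *	void aesni_encrypt(const unsigned char *in, unsigned char *out,
 *	    const AES_KEY *key);
 *	void aesni_decrypt(const unsigned char *in, unsigned char *out,
 *	    const AES_KEY *key);
 *	int aesni_set_encrypt_key(const unsigned char *userKey, int bits,
 *	    AES_KEY *key);
 *	int aesni_set_decrypt_key(const unsigned char *userKey, int bits,
 *	    AES_KEY *key);
 *	void aesni_ecb_encrypt(const unsigned char *in, unsigned char *out,
 *	    size_t length, const AES_KEY *key, int enc);
 *	void aesni_cbc_encrypt(const unsigned char *in, unsigned char *out,
 *	    size_t length, const AES_KEY *key, unsigned char *ivec, int enc);
 *
 * The set-key routines return 0 on success, -1 for a NULL pointer and
 * -2 for an unsupported key size (see the bad_pointer/bad_keybits
 * stubs near the end of the file).
 */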
.file	"/home/spz/cvs/src/crypto/external/bsd/openssl/dist/crypto/aes/asm/aesni-x86.s"
.text
.globl	aesni_encrypt
.type	aesni_encrypt,@function
.align	16
aesni_encrypt:
.L_aesni_encrypt_begin:
	movl	4(%esp),%eax
	movl	12(%esp),%edx
	movups	(%eax),%xmm0
	movl	240(%edx),%ecx
	movl	8(%esp),%eax
	movups	(%edx),%xmm3
	movups	16(%edx),%xmm4
	leal	32(%edx),%edx
	pxor	%xmm3,%xmm0
.L000enc1_loop:
	aesenc	%xmm4,%xmm0
	decl	%ecx
	movups	(%edx),%xmm4
	leal	16(%edx),%edx
	jnz	.L000enc1_loop
	aesenclast	%xmm4,%xmm0
	movups	%xmm0,(%eax)
	ret
.size	aesni_encrypt,.-.L_aesni_encrypt_begin
.globl	aesni_decrypt
.type	aesni_decrypt,@function
.align	16
aesni_decrypt:
.L_aesni_decrypt_begin:
	movl	4(%esp),%eax
	movl	12(%esp),%edx
	movups	(%eax),%xmm0
	movl	240(%edx),%ecx
	movl	8(%esp),%eax
	movups	(%edx),%xmm3
	movups	16(%edx),%xmm4
	leal	32(%edx),%edx
	pxor	%xmm3,%xmm0
.L001dec1_loop:
	aesdec	%xmm4,%xmm0
	decl	%ecx
	movups	(%edx),%xmm4
	leal	16(%edx),%edx
	jnz	.L001dec1_loop
	aesdeclast	%xmm4,%xmm0
	movups	%xmm0,(%eax)
	ret
.size	aesni_decrypt,.-.L_aesni_decrypt_begin
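/*
 * The internal helpers that follow (_aesni_encrypt3, _aesni_decrypt3
 * and their 4-block variants) pipeline three or four blocks held in
 * %xmm0..%xmm2 (plus %xmm7 in the 4-block case) through the cipher.
 * %edx points at the round keys and %ecx holds the round counter,
 * which is halved on entry since each loop iteration applies two
 * rounds; %xmm3/%xmm4 are scratch.
 */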
.type	_aesni_encrypt3,@function
.align	16
_aesni_encrypt3:
	movups	(%edx),%xmm3
	shrl	$1,%ecx
	movups	16(%edx),%xmm4
	leal	32(%edx),%edx
	pxor	%xmm3,%xmm0
	pxor	%xmm3,%xmm1
	pxor	%xmm3,%xmm2
	jmp	.L002enc3_loop
.align	16
.L002enc3_loop:
	aesenc	%xmm4,%xmm0
	movups	(%edx),%xmm3
	aesenc	%xmm4,%xmm1
	decl	%ecx
	aesenc	%xmm4,%xmm2
	movups	16(%edx),%xmm4
	aesenc	%xmm3,%xmm0
	leal	32(%edx),%edx
	aesenc	%xmm3,%xmm1
	aesenc	%xmm3,%xmm2
	jnz	.L002enc3_loop
	aesenc	%xmm4,%xmm0
	movups	(%edx),%xmm3
	aesenc	%xmm4,%xmm1
	aesenc	%xmm4,%xmm2
	aesenclast	%xmm3,%xmm0
	aesenclast	%xmm3,%xmm1
	aesenclast	%xmm3,%xmm2
	ret
.size	_aesni_encrypt3,.-_aesni_encrypt3
.type	_aesni_decrypt3,@function
.align	16
_aesni_decrypt3:
	movups	(%edx),%xmm3
	shrl	$1,%ecx
	movups	16(%edx),%xmm4
	leal	32(%edx),%edx
	pxor	%xmm3,%xmm0
	pxor	%xmm3,%xmm1
	pxor	%xmm3,%xmm2
	jmp	.L003dec3_loop
.align	16
.L003dec3_loop:
	aesdec	%xmm4,%xmm0
	movups	(%edx),%xmm3
	aesdec	%xmm4,%xmm1
	decl	%ecx
	aesdec	%xmm4,%xmm2
	movups	16(%edx),%xmm4
	aesdec	%xmm3,%xmm0
	leal	32(%edx),%edx
	aesdec	%xmm3,%xmm1
	aesdec	%xmm3,%xmm2
	jnz	.L003dec3_loop
	aesdec	%xmm4,%xmm0
	movups	(%edx),%xmm3
	aesdec	%xmm4,%xmm1
	aesdec	%xmm4,%xmm2
	aesdeclast	%xmm3,%xmm0
	aesdeclast	%xmm3,%xmm1
	aesdeclast	%xmm3,%xmm2
	ret
.size	_aesni_decrypt3,.-_aesni_decrypt3
.type	_aesni_encrypt4,@function
.align	16
_aesni_encrypt4:
	movups	(%edx),%xmm3
	movups	16(%edx),%xmm4
	shrl	$1,%ecx
	leal	32(%edx),%edx
	pxor	%xmm3,%xmm0
	pxor	%xmm3,%xmm1
	pxor	%xmm3,%xmm2
	pxor	%xmm3,%xmm7
	jmp	.L004enc3_loop
.align	16
.L004enc3_loop:
	aesenc	%xmm4,%xmm0
	movups	(%edx),%xmm3
	aesenc	%xmm4,%xmm1
	decl	%ecx
	aesenc	%xmm4,%xmm2
	aesenc	%xmm4,%xmm7
	movups	16(%edx),%xmm4
	aesenc	%xmm3,%xmm0
	leal	32(%edx),%edx
	aesenc	%xmm3,%xmm1
	aesenc	%xmm3,%xmm2
	aesenc	%xmm3,%xmm7
	jnz	.L004enc3_loop
	aesenc	%xmm4,%xmm0
	movups	(%edx),%xmm3
	aesenc	%xmm4,%xmm1
	aesenc	%xmm4,%xmm2
	aesenc	%xmm4,%xmm7
	aesenclast	%xmm3,%xmm0
	aesenclast	%xmm3,%xmm1
	aesenclast	%xmm3,%xmm2
	aesenclast	%xmm3,%xmm7
	ret
.size	_aesni_encrypt4,.-_aesni_encrypt4
.type	_aesni_decrypt4,@function
.align	16
_aesni_decrypt4:
	movups	(%edx),%xmm3
	movups	16(%edx),%xmm4
	shrl	$1,%ecx
	leal	32(%edx),%edx
	pxor	%xmm3,%xmm0
	pxor	%xmm3,%xmm1
	pxor	%xmm3,%xmm2
	pxor	%xmm3,%xmm7
	jmp	.L005dec3_loop
.align	16
.L005dec3_loop:
	aesdec	%xmm4,%xmm0
	movups	(%edx),%xmm3
	aesdec	%xmm4,%xmm1
	decl	%ecx
	aesdec	%xmm4,%xmm2
	aesdec	%xmm4,%xmm7
	movups	16(%edx),%xmm4
	aesdec	%xmm3,%xmm0
	leal	32(%edx),%edx
	aesdec	%xmm3,%xmm1
	aesdec	%xmm3,%xmm2
	aesdec	%xmm3,%xmm7
	jnz	.L005dec3_loop
	aesdec	%xmm4,%xmm0
	movups	(%edx),%xmm3
	aesdec	%xmm4,%xmm1
	aesdec	%xmm4,%xmm2
	aesdec	%xmm4,%xmm7
	aesdeclast	%xmm3,%xmm0
	aesdeclast	%xmm3,%xmm1
	aesdeclast	%xmm3,%xmm2
	aesdeclast	%xmm3,%xmm7
	ret
.size	_aesni_decrypt4,.-_aesni_decrypt4
.globl	aesni_ecb_encrypt
.type	aesni_ecb_encrypt,@function
.align	16
aesni_ecb_encrypt:
.L_aesni_ecb_encrypt_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx
	movl	36(%esp),%ecx
	cmpl	$16,%eax
	jb	.L006ecb_ret
	andl	$-16,%eax
	testl	%ecx,%ecx
	movl	240(%edx),%ecx
	movl	%edx,%ebp
	movl	%ecx,%ebx
	jz	.L007ecb_decrypt
	subl	$64,%eax
	jbe	.L008ecb_enc_tail
	jmp	.L009ecb_enc_loop3
.align	16
.L009ecb_enc_loop3:
	movups	(%esi),%xmm0
	movups	16(%esi),%xmm1
	movups	32(%esi),%xmm2
	call	_aesni_encrypt3
	subl	$48,%eax
	leal	48(%esi),%esi
	leal	48(%edi),%edi
	movups	%xmm0,-48(%edi)
	movl	%ebp,%edx
	movups	%xmm1,-32(%edi)
	movl	%ebx,%ecx
	movups	%xmm2,-16(%edi)
	ja	.L009ecb_enc_loop3
.L008ecb_enc_tail:
	addl	$64,%eax
	jz	.L006ecb_ret
	cmpl	$16,%eax
	movups	(%esi),%xmm0
	je	.L010ecb_enc_one
	cmpl	$32,%eax
	movups	16(%esi),%xmm1
	je	.L011ecb_enc_two
	cmpl	$48,%eax
	movups	32(%esi),%xmm2
	je	.L012ecb_enc_three
	movups	48(%esi),%xmm7
	call	_aesni_encrypt4
	movups	%xmm0,(%edi)
	movups	%xmm1,16(%edi)
	movups	%xmm2,32(%edi)
	movups	%xmm7,48(%edi)
	jmp	.L006ecb_ret
.align	16
.L010ecb_enc_one:
	movups	(%edx),%xmm3
	movups	16(%edx),%xmm4
	leal	32(%edx),%edx
	pxor	%xmm3,%xmm0
.L013enc1_loop:
	aesenc	%xmm4,%xmm0
	decl	%ecx
	movups	(%edx),%xmm4
	leal	16(%edx),%edx
	jnz	.L013enc1_loop
	aesenclast	%xmm4,%xmm0
	movups	%xmm0,(%edi)
	jmp	.L006ecb_ret
.align	16
.L011ecb_enc_two:
	call	_aesni_encrypt3
	movups	%xmm0,(%edi)
	movups	%xmm1,16(%edi)
	jmp	.L006ecb_ret
.align	16
.L012ecb_enc_three:
	call	_aesni_encrypt3
	movups	%xmm0,(%edi)
	movups	%xmm1,16(%edi)
	movups	%xmm2,32(%edi)
	jmp	.L006ecb_ret
.align	16
.L007ecb_decrypt:
	subl	$64,%eax
	jbe	.L014ecb_dec_tail
	jmp	.L015ecb_dec_loop3
.align	16
.L015ecb_dec_loop3:
	movups	(%esi),%xmm0
	movups	16(%esi),%xmm1
	movups	32(%esi),%xmm2
	call	_aesni_decrypt3
	subl	$48,%eax
	leal	48(%esi),%esi
	leal	48(%edi),%edi
	movups	%xmm0,-48(%edi)
	movl	%ebp,%edx
	movups	%xmm1,-32(%edi)
	movl	%ebx,%ecx
	movups	%xmm2,-16(%edi)
	ja	.L015ecb_dec_loop3
.L014ecb_dec_tail:
	addl	$64,%eax
	jz	.L006ecb_ret
	cmpl	$16,%eax
	movups	(%esi),%xmm0
	je	.L016ecb_dec_one
	cmpl	$32,%eax
	movups	16(%esi),%xmm1
	je	.L017ecb_dec_two
	cmpl	$48,%eax
	movups	32(%esi),%xmm2
	je	.L018ecb_dec_three
	movups	48(%esi),%xmm7
	call	_aesni_decrypt4
	movups	%xmm0,(%edi)
	movups	%xmm1,16(%edi)
	movups	%xmm2,32(%edi)
	movups	%xmm7,48(%edi)
	jmp	.L006ecb_ret
.align	16
.L016ecb_dec_one:
	movups	(%edx),%xmm3
	movups	16(%edx),%xmm4
	leal	32(%edx),%edx
	pxor	%xmm3,%xmm0
.L019dec1_loop:
	aesdec	%xmm4,%xmm0
	decl	%ecx
	movups	(%edx),%xmm4
	leal	16(%edx),%edx
	jnz	.L019dec1_loop
	aesdeclast	%xmm4,%xmm0
	movups	%xmm0,(%edi)
	jmp	.L006ecb_ret
.align	16
.L017ecb_dec_two:
	call	_aesni_decrypt3
	movups	%xmm0,(%edi)
	movups	%xmm1,16(%edi)
	jmp	.L006ecb_ret
.align	16
.L018ecb_dec_three:
	call	_aesni_decrypt3
	movups	%xmm0,(%edi)
	movups	%xmm1,16(%edi)
	movups	%xmm2,32(%edi)
.L006ecb_ret:
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_ecb_encrypt,.-.L_aesni_ecb_encrypt_begin
.globl	aesni_cbc_encrypt
.type	aesni_cbc_encrypt,@function
.align	16
aesni_cbc_encrypt:
.L_aesni_cbc_encrypt_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi
	movl	24(%esp),%edi
	movl	28(%esp),%eax
	movl	32(%esp),%edx
	testl	%eax,%eax
	movl	36(%esp),%ebp
	jz	.L020cbc_ret
	cmpl	$0,40(%esp)
	movups	(%ebp),%xmm5
	movl	240(%edx),%ecx
	movl	%edx,%ebp
	movl	%ecx,%ebx
	je	.L021cbc_decrypt
	movaps	%xmm5,%xmm0
	cmpl	$16,%eax
	jb	.L022cbc_enc_tail
	subl	$16,%eax
	jmp	.L023cbc_enc_loop
.align	16
.L023cbc_enc_loop:
	movups	(%esi),%xmm5
	leal	16(%esi),%esi
	pxor	%xmm5,%xmm0
	movups	(%edx),%xmm3
	movups	16(%edx),%xmm4
	leal	32(%edx),%edx
	pxor	%xmm3,%xmm0
.L024enc1_loop:
	aesenc	%xmm4,%xmm0
	decl	%ecx
	movups	(%edx),%xmm4
	leal	16(%edx),%edx
	jnz	.L024enc1_loop
	aesenclast	%xmm4,%xmm0
	subl	$16,%eax
	leal	16(%edi),%edi
	movl	%ebx,%ecx
	movl	%ebp,%edx
	movups	%xmm0,-16(%edi)
	jnc	.L023cbc_enc_loop
	addl	$16,%eax
	jnz	.L022cbc_enc_tail
	movaps	%xmm0,%xmm5
	jmp	.L020cbc_ret
.L022cbc_enc_tail:
	movl	%eax,%ecx
.long	2767451785	/* = .byte 0x89,0xf6,0xf3,0xa4: mov %esi,%esi; rep movsb */
	movl	$16,%ecx
	subl	%eax,%ecx
	xorl	%eax,%eax
.long	2868115081	/* = .byte 0x89,0xf6,0xf3,0xaa: mov %esi,%esi; rep stosb */
	leal	-16(%edi),%edi
	movl	%ebx,%ecx
	movl	%edi,%esi
	movl	%ebp,%edx
	jmp	.L023cbc_enc_loop
.align	16
.L021cbc_decrypt:
	subl	$64,%eax
	jbe	.L025cbc_dec_tail
	jmp	.L026cbc_dec_loop3
.align	16
.L026cbc_dec_loop3:
	movups	(%esi),%xmm0
	movups	16(%esi),%xmm1
	movups	32(%esi),%xmm2
	movaps	%xmm0,%xmm6
	movaps	%xmm1,%xmm7
	call	_aesni_decrypt3
	subl	$48,%eax
	leal	48(%esi),%esi
	leal	48(%edi),%edi
	pxor	%xmm5,%xmm0
	pxor	%xmm6,%xmm1
	movups	-16(%esi),%xmm5
	pxor	%xmm7,%xmm2
	movups	%xmm0,-48(%edi)
	movl	%ebx,%ecx
	movups	%xmm1,-32(%edi)
	movl	%ebp,%edx
	movups	%xmm2,-16(%edi)
	ja	.L026cbc_dec_loop3
.L025cbc_dec_tail:
	addl	$64,%eax
	jz	.L020cbc_ret
	movups	(%esi),%xmm0
	cmpl	$16,%eax
	movaps	%xmm0,%xmm6
	jbe	.L027cbc_dec_one
	movups	16(%esi),%xmm1
	cmpl	$32,%eax
	movaps	%xmm1,%xmm7
	jbe	.L028cbc_dec_two
	movups	32(%esi),%xmm2
	cmpl	$48,%eax
	jbe	.L029cbc_dec_three
	movups	48(%esi),%xmm7
	call	_aesni_decrypt4
	movups	16(%esi),%xmm3
	movups	32(%esi),%xmm4
	pxor	%xmm5,%xmm0
	pxor	%xmm6,%xmm1
	movups	48(%esi),%xmm5
	movups	%xmm0,(%edi)
	pxor	%xmm3,%xmm2
	pxor	%xmm4,%xmm7
	movups	%xmm1,16(%edi)
	movups	%xmm2,32(%edi)
	movaps	%xmm7,%xmm0
	leal	48(%edi),%edi
	jmp	.L030cbc_dec_tail_collected
.L027cbc_dec_one:
	movups	(%edx),%xmm3
	movups	16(%edx),%xmm4
	leal	32(%edx),%edx
	pxor	%xmm3,%xmm0
.L031dec1_loop:
	aesdec	%xmm4,%xmm0
	decl	%ecx
	movups	(%edx),%xmm4
	leal	16(%edx),%edx
	jnz	.L031dec1_loop
	aesdeclast	%xmm4,%xmm0
	pxor	%xmm5,%xmm0
	movaps	%xmm6,%xmm5
	jmp	.L030cbc_dec_tail_collected
.L028cbc_dec_two:
	call	_aesni_decrypt3
	pxor	%xmm5,%xmm0
	pxor	%xmm6,%xmm1
	movups	%xmm0,(%edi)
	movaps	%xmm1,%xmm0
	movaps	%xmm7,%xmm5
	leal	16(%edi),%edi
	jmp	.L030cbc_dec_tail_collected
.L029cbc_dec_three:
	call	_aesni_decrypt3
	pxor	%xmm5,%xmm0
	pxor	%xmm6,%xmm1
	pxor	%xmm7,%xmm2
	movups	%xmm0,(%edi)
	movups	%xmm1,16(%edi)
	movaps	%xmm2,%xmm0
	movups	32(%esi),%xmm5
	leal	32(%edi),%edi
.L030cbc_dec_tail_collected:
	andl	$15,%eax
	jnz	.L032cbc_dec_tail_partial
	movups	%xmm0,(%edi)
	jmp	.L020cbc_ret
.L032cbc_dec_tail_partial:
	movl	%esp,%ebp
	subl	$16,%esp
	andl	$-16,%esp
	movaps	%xmm0,(%esp)
	movl	%esp,%esi
	movl	%eax,%ecx
.long	2767451785	/* = .byte 0x89,0xf6,0xf3,0xa4: mov %esi,%esi; rep movsb */
	movl	%ebp,%esp
.L020cbc_ret:
	movl	36(%esp),%ebp
	movups	%xmm5,(%ebp)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	aesni_cbc_encrypt,.-.L_aesni_cbc_encrypt_begin
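/*
 * _aesni_set_encrypt_key: %eax = user key, %ecx = key size in bits,
 * %edx = key schedule to fill.  Returns 0 in %eax on success, -1 for
 * a NULL pointer and -2 for an unsupported key size; the round count
 * is stored at offset 240 of the schedule, where the encrypt/decrypt
 * entry points read it back.
 */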
.type	_aesni_set_encrypt_key,@function
.align	16
_aesni_set_encrypt_key:
	testl	%eax,%eax
	jz	.L033bad_pointer
	testl	%edx,%edx
	jz	.L033bad_pointer
	movups	(%eax),%xmm0
	pxor	%xmm4,%xmm4
	leal	16(%edx),%edx
	cmpl	$256,%ecx
	je	.L03414rounds
	cmpl	$192,%ecx
	je	.L03512rounds
	cmpl	$128,%ecx
	jne	.L036bad_keybits
.align	16
.L03710rounds:
	movl	$9,%ecx
	movups	%xmm0,-16(%edx)
	aeskeygenassist	$1,%xmm0,%xmm1
	call	.L038key_128_cold
	aeskeygenassist	$2,%xmm0,%xmm1
	call	.L039key_128
	aeskeygenassist	$4,%xmm0,%xmm1
	call	.L039key_128
	aeskeygenassist	$8,%xmm0,%xmm1
	call	.L039key_128
	aeskeygenassist	$16,%xmm0,%xmm1
	call	.L039key_128
	aeskeygenassist	$32,%xmm0,%xmm1
	call	.L039key_128
	aeskeygenassist	$64,%xmm0,%xmm1
	call	.L039key_128
	aeskeygenassist	$128,%xmm0,%xmm1
	call	.L039key_128
	aeskeygenassist	$27,%xmm0,%xmm1
	call	.L039key_128
	aeskeygenassist	$54,%xmm0,%xmm1
	call	.L039key_128
	movups	%xmm0,(%edx)
	movl	%ecx,80(%edx)
	xorl	%eax,%eax
	ret
.align	16
.L039key_128:
	movups	%xmm0,(%edx)
	leal	16(%edx),%edx
.L038key_128_cold:
	shufps	$16,%xmm0,%xmm4
	pxor	%xmm4,%xmm0
	shufps	$140,%xmm0,%xmm4
	pxor	%xmm4,%xmm0
	pshufd	$255,%xmm1,%xmm1
	pxor	%xmm1,%xmm0
	ret
.align	16
.L03512rounds:
	movq	16(%eax),%xmm2
	movl	$11,%ecx
	movups	%xmm0,-16(%edx)
	aeskeygenassist	$1,%xmm2,%xmm1
	call	.L040key_192a_cold
	aeskeygenassist	$2,%xmm2,%xmm1
	call	.L041key_192b
	aeskeygenassist	$4,%xmm2,%xmm1
	call	.L042key_192a
	aeskeygenassist	$8,%xmm2,%xmm1
	call	.L041key_192b
	aeskeygenassist	$16,%xmm2,%xmm1
	call	.L042key_192a
	aeskeygenassist	$32,%xmm2,%xmm1
	call	.L041key_192b
	aeskeygenassist	$64,%xmm2,%xmm1
	call	.L042key_192a
	aeskeygenassist	$128,%xmm2,%xmm1
	call	.L041key_192b
	movups	%xmm0,(%edx)
	movl	%ecx,48(%edx)
	xorl	%eax,%eax
	ret
.align	16
.L042key_192a:
	movups	%xmm0,(%edx)
	leal	16(%edx),%edx
.align	16
.L040key_192a_cold:
	movaps	%xmm2,%xmm5
.L043key_192b_warm:
	shufps	$16,%xmm0,%xmm4
	movaps	%xmm2,%xmm3
	pxor	%xmm4,%xmm0
	shufps	$140,%xmm0,%xmm4
	pslldq	$4,%xmm3
	pxor	%xmm4,%xmm0
	pshufd	$85,%xmm1,%xmm1
	pxor	%xmm3,%xmm2
	pxor	%xmm1,%xmm0
	pshufd	$255,%xmm0,%xmm3
	pxor	%xmm3,%xmm2
	ret
.align	16
.L041key_192b:
	movaps	%xmm0,%xmm3
	shufps	$68,%xmm0,%xmm5
	movups	%xmm5,(%edx)
	shufps	$78,%xmm2,%xmm3
	movups	%xmm3,16(%edx)
	leal	32(%edx),%edx
	jmp	.L043key_192b_warm
.align	16
.L03414rounds:
	movups	16(%eax),%xmm2
	movl	$13,%ecx
	leal	16(%edx),%edx
	movups	%xmm0,-32(%edx)
	movups	%xmm2,-16(%edx)
	aeskeygenassist	$1,%xmm2,%xmm1
	call	.L044key_256a_cold
	aeskeygenassist	$1,%xmm0,%xmm1
	call	.L045key_256b
	aeskeygenassist	$2,%xmm2,%xmm1
	call	.L046key_256a
	aeskeygenassist	$2,%xmm0,%xmm1
	call	.L045key_256b
	aeskeygenassist	$4,%xmm2,%xmm1
	call	.L046key_256a
	aeskeygenassist	$4,%xmm0,%xmm1
	call	.L045key_256b
	aeskeygenassist	$8,%xmm2,%xmm1
	call	.L046key_256a
	aeskeygenassist	$8,%xmm0,%xmm1
	call	.L045key_256b
	aeskeygenassist	$16,%xmm2,%xmm1
	call	.L046key_256a
	aeskeygenassist	$16,%xmm0,%xmm1
	call	.L045key_256b
	aeskeygenassist	$32,%xmm2,%xmm1
	call	.L046key_256a
	aeskeygenassist	$32,%xmm0,%xmm1
	call	.L045key_256b
	aeskeygenassist	$64,%xmm2,%xmm1
	call	.L046key_256a
	movups	%xmm0,(%edx)
	movl	%ecx,16(%edx)
	xorl	%eax,%eax
	ret
.align	16
.L046key_256a:
	movups	%xmm2,(%edx)
	leal	16(%edx),%edx
.L044key_256a_cold:
	shufps	$16,%xmm0,%xmm4
	pxor	%xmm4,%xmm0
	shufps	$140,%xmm0,%xmm4
	pxor	%xmm4,%xmm0
	pshufd	$255,%xmm1,%xmm1
	pxor	%xmm1,%xmm0
	ret
.align	16
.L045key_256b:
	movups	%xmm0,(%edx)
	leal	16(%edx),%edx
	shufps	$16,%xmm2,%xmm4
	pxor	%xmm4,%xmm2
	shufps	$140,%xmm2,%xmm4
	pxor	%xmm4,%xmm2
	pshufd	$170,%xmm1,%xmm1
	pxor	%xmm1,%xmm2
	ret
.align	4
.L033bad_pointer:
	movl	$-1,%eax
	ret
.align	4
.L036bad_keybits:
	movl	$-2,%eax
	ret
.size	_aesni_set_encrypt_key,.-_aesni_set_encrypt_key
.globl	aesni_set_encrypt_key
.type	aesni_set_encrypt_key,@function
.align	16
aesni_set_encrypt_key:
.L_aesni_set_encrypt_key_begin:
	movl	4(%esp),%eax
	movl	8(%esp),%ecx
	movl	12(%esp),%edx
	call	_aesni_set_encrypt_key
	ret
.size	aesni_set_encrypt_key,.-.L_aesni_set_encrypt_key_begin
.globl	aesni_set_decrypt_key
.type	aesni_set_decrypt_key,@function
.align	16
aesni_set_decrypt_key:
.L_aesni_set_decrypt_key_begin:
	movl	4(%esp),%eax
	movl	8(%esp),%ecx
	movl	12(%esp),%edx
	call	_aesni_set_encrypt_key
	movl	12(%esp),%edx
	shll	$4,%ecx
	testl	%eax,%eax
	jnz	.L047dec_key_ret
	leal	16(%edx,%ecx,1),%eax
	movups	(%edx),%xmm0
	movups	(%eax),%xmm1
	movups	%xmm0,(%eax)
	movups	%xmm1,(%edx)
	leal	16(%edx),%edx
	leal	-16(%eax),%eax
.L048dec_key_inverse:
	movups	(%edx),%xmm0
	movups	(%eax),%xmm1
	aesimc	%xmm0,%xmm0
	aesimc	%xmm1,%xmm1
	leal	16(%edx),%edx
	leal	-16(%eax),%eax
	cmpl	%edx,%eax
	movups	%xmm0,16(%eax)
	movups	%xmm1,-16(%edx)
	ja	.L048dec_key_inverse
	movups	(%edx),%xmm0
	aesimc	%xmm0,%xmm0
	movups	%xmm0,(%edx)
	xorl	%eax,%eax
.L047dec_key_ret:
	ret
.size	aesni_set_decrypt_key,.-.L_aesni_set_decrypt_key_begin
.byte	65,69,83,32,102,111,114,32,73,110,116,101,108,32,65,69
.byte	83,45,78,73,44,32,67,82,89,80,84,79,71,65,77,83
.byte	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
.byte	115,108,46,111,114,103,62,0

File Added: src/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/cmll-x86.S
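/*
 * The routines in this file implement OpenSSL's Camellia entry points.
 * A sketch of the C prototypes they correspond to, inferred from the
 * cdecl stack loads below (CAMELLIA_KEY is OpenSSL's key-table struct;
 * the grand-round count is stored at offset 272 and read back by
 * Camellia_encrypt/Camellia_decrypt):
 *
 *	void Camellia_EncryptBlock_Rounds(int grandRounds,
 *	    const unsigned char *plaintext, const unsigned int *keyTable,
 *	    unsigned char *ciphertext);
 *	void Camellia_DecryptBlock_Rounds(int grandRounds,
 *	    const unsigned char *ciphertext, const unsigned int *keyTable,
 *	    unsigned char *plaintext);
 *	int Camellia_Ekeygen(int keyBitLength, const unsigned char *rawKey,
 *	    unsigned int *keyTable);
 *	int Camellia_set_key(const unsigned char *userKey, int bits,
 *	    CAMELLIA_KEY *key);
 *	void Camellia_cbc_encrypt(const unsigned char *in,
 *	    unsigned char *out, size_t len, const CAMELLIA_KEY *key,
 *	    unsigned char *ivec, int enc);
 *
 * Camellia_Ekeygen returns the grand-round count (3 for 128-bit keys,
 * 4 for 192/256); Camellia_set_key returns 0 on success, -1 for a
 * NULL argument and -2 for an unsupported key size.
 */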
.file	"cmll-586.s"
.text
.globl	Camellia_EncryptBlock_Rounds
.type	Camellia_EncryptBlock_Rounds,@function
.align	16
Camellia_EncryptBlock_Rounds:
.L_Camellia_EncryptBlock_Rounds_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%eax
	movl	24(%esp),%esi
	movl	28(%esp),%edi
	movl	%esp,%ebx
	subl	$28,%esp
	andl	$-64,%esp
	leal	-127(%edi),%ecx
	subl	%esp,%ecx
	negl	%ecx
	andl	$960,%ecx
	subl	%ecx,%esp
	addl	$4,%esp
	shll	$6,%eax
	leal	(%edi,%eax,1),%eax
	movl	%ebx,20(%esp)
	movl	%eax,16(%esp)
	call	.L000pic_point
.L000pic_point:
	popl	%ebp
	leal	.LCamellia_SBOX-.L000pic_point(%ebp),%ebp
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	bswap	%eax
	movl	12(%esi),%edx
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	call	_x86_Camellia_encrypt
	movl	20(%esp),%esp
	bswap	%eax
	movl	32(%esp),%esi
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	movl	%eax,(%esi)
	movl	%ebx,4(%esi)
	movl	%ecx,8(%esi)
	movl	%edx,12(%esi)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	Camellia_EncryptBlock_Rounds,.-.L_Camellia_EncryptBlock_Rounds_begin
.globl	Camellia_EncryptBlock
.type	Camellia_EncryptBlock,@function
.align	16
Camellia_EncryptBlock:
.L_Camellia_EncryptBlock_begin:
	movl	$128,%eax
	subl	4(%esp),%eax		/* CF=1 iff keyBitLength > 128 */
	movl	$3,%eax
	adcl	$0,%eax			/* grandRounds = 3 + CF */
	movl	%eax,4(%esp)
	jmp	.L_Camellia_EncryptBlock_Rounds_begin
.size	Camellia_EncryptBlock,.-.L_Camellia_EncryptBlock_begin
.globl	Camellia_encrypt
.type	Camellia_encrypt,@function
.align	16
Camellia_encrypt:
.L_Camellia_encrypt_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi
	movl	28(%esp),%edi
	movl	%esp,%ebx
	subl	$28,%esp
	andl	$-64,%esp
	movl	272(%edi),%eax
	leal	-127(%edi),%ecx
	subl	%esp,%ecx
	negl	%ecx
	andl	$960,%ecx
	subl	%ecx,%esp
	addl	$4,%esp
	shll	$6,%eax
	leal	(%edi,%eax,1),%eax
	movl	%ebx,20(%esp)
	movl	%eax,16(%esp)
	call	.L001pic_point
.L001pic_point:
	popl	%ebp
	leal	.LCamellia_SBOX-.L001pic_point(%ebp),%ebp
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	bswap	%eax
	movl	12(%esi),%edx
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	call	_x86_Camellia_encrypt
	movl	20(%esp),%esp
	bswap	%eax
	movl	24(%esp),%esi
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	movl	%eax,(%esi)
	movl	%ebx,4(%esi)
	movl	%ecx,8(%esi)
	movl	%edx,12(%esi)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	Camellia_encrypt,.-.L_Camellia_encrypt_begin
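/*
 * _x86_Camellia_encrypt: %eax..%edx carry the four big-endian words of
 * the block, %edi the key schedule, %ebp the base of .LCamellia_SBOX.
 * The caller stacks a pointer past the last round key (20(%esp) as
 * seen from here), which the loop compares %edi against; 4..16(%esp)
 * are scratch slots for the half-block words.
 */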
.type	_x86_Camellia_encrypt,@function
.align	16
_x86_Camellia_encrypt:
	xorl	(%edi),%eax
	xorl	4(%edi),%ebx
	xorl	8(%edi),%ecx
	xorl	12(%edi),%edx
	movl	16(%edi),%esi
	movl	%eax,4(%esp)
	movl	%ebx,8(%esp)
	movl	%ecx,12(%esp)
	movl	%edx,16(%esp)
.align	16
.L002loop:
	xorl	%esi,%eax
	xorl	20(%edi),%ebx
	movzbl	%ah,%esi
	movl	2052(%ebp,%esi,8),%edx
	movzbl	%al,%esi
	xorl	4(%ebp,%esi,8),%edx
	shrl	$16,%eax
	movzbl	%bl,%esi
	movl	(%ebp,%esi,8),%ecx
	movzbl	%ah,%esi
	xorl	(%ebp,%esi,8),%edx
	movzbl	%bh,%esi
	xorl	4(%ebp,%esi,8),%ecx
	shrl	$16,%ebx
	movzbl	%al,%eax
	xorl	2048(%ebp,%eax,8),%edx
	movzbl	%bh,%esi
	movl	16(%esp),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	2048(%ebp,%esi,8),%ecx
	movzbl	%bl,%esi
	movl	12(%esp),%ebx
	xorl	%eax,%edx
	xorl	2052(%ebp,%esi,8),%ecx
	movl	24(%edi),%esi
	xorl	%ecx,%edx
	movl	%edx,16(%esp)
	xorl	%ebx,%ecx
	movl	%ecx,12(%esp)
	xorl	%esi,%ecx
	xorl	28(%edi),%edx
	movzbl	%ch,%esi
	movl	2052(%ebp,%esi,8),%ebx
	movzbl	%cl,%esi
	xorl	4(%ebp,%esi,8),%ebx
	shrl	$16,%ecx
	movzbl	%dl,%esi
	movl	(%ebp,%esi,8),%eax
	movzbl	%ch,%esi
	xorl	(%ebp,%esi,8),%ebx
	movzbl	%dh,%esi
	xorl	4(%ebp,%esi,8),%eax
	shrl	$16,%edx
	movzbl	%cl,%ecx
	xorl	2048(%ebp,%ecx,8),%ebx
	movzbl	%dh,%esi
	movl	8(%esp),%ecx
	xorl	%ebx,%eax
	rorl	$8,%ebx
	xorl	2048(%ebp,%esi,8),%eax
	movzbl	%dl,%esi
	movl	4(%esp),%edx
	xorl	%ecx,%ebx
	xorl	2052(%ebp,%esi,8),%eax
	movl	32(%edi),%esi
	xorl	%eax,%ebx
	movl	%ebx,8(%esp)
	xorl	%edx,%eax
	movl	%eax,4(%esp)
	xorl	%esi,%eax
	xorl	36(%edi),%ebx
	movzbl	%ah,%esi
	movl	2052(%ebp,%esi,8),%edx
	movzbl	%al,%esi
	xorl	4(%ebp,%esi,8),%edx
	shrl	$16,%eax
	movzbl	%bl,%esi
	movl	(%ebp,%esi,8),%ecx
	movzbl	%ah,%esi
	xorl	(%ebp,%esi,8),%edx
	movzbl	%bh,%esi
	xorl	4(%ebp,%esi,8),%ecx
	shrl	$16,%ebx
	movzbl	%al,%eax
	xorl	2048(%ebp,%eax,8),%edx
	movzbl	%bh,%esi
	movl	16(%esp),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	2048(%ebp,%esi,8),%ecx
	movzbl	%bl,%esi
	movl	12(%esp),%ebx
	xorl	%eax,%edx
	xorl	2052(%ebp,%esi,8),%ecx
	movl	40(%edi),%esi
	xorl	%ecx,%edx
	movl	%edx,16(%esp)
	xorl	%ebx,%ecx
	movl	%ecx,12(%esp)
	xorl	%esi,%ecx
	xorl	44(%edi),%edx
	movzbl	%ch,%esi
	movl	2052(%ebp,%esi,8),%ebx
	movzbl	%cl,%esi
	xorl	4(%ebp,%esi,8),%ebx
	shrl	$16,%ecx
	movzbl	%dl,%esi
	movl	(%ebp,%esi,8),%eax
	movzbl	%ch,%esi
	xorl	(%ebp,%esi,8),%ebx
	movzbl	%dh,%esi
	xorl	4(%ebp,%esi,8),%eax
	shrl	$16,%edx
	movzbl	%cl,%ecx
	xorl	2048(%ebp,%ecx,8),%ebx
	movzbl	%dh,%esi
	movl	8(%esp),%ecx
	xorl	%ebx,%eax
	rorl	$8,%ebx
	xorl	2048(%ebp,%esi,8),%eax
	movzbl	%dl,%esi
	movl	4(%esp),%edx
	xorl	%ecx,%ebx
	xorl	2052(%ebp,%esi,8),%eax
	movl	48(%edi),%esi
	xorl	%eax,%ebx
	movl	%ebx,8(%esp)
	xorl	%edx,%eax
	movl	%eax,4(%esp)
	xorl	%esi,%eax
	xorl	52(%edi),%ebx
	movzbl	%ah,%esi
	movl	2052(%ebp,%esi,8),%edx
	movzbl	%al,%esi
	xorl	4(%ebp,%esi,8),%edx
	shrl	$16,%eax
	movzbl	%bl,%esi
	movl	(%ebp,%esi,8),%ecx
	movzbl	%ah,%esi
	xorl	(%ebp,%esi,8),%edx
	movzbl	%bh,%esi
	xorl	4(%ebp,%esi,8),%ecx
	shrl	$16,%ebx
	movzbl	%al,%eax
	xorl	2048(%ebp,%eax,8),%edx
	movzbl	%bh,%esi
	movl	16(%esp),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	2048(%ebp,%esi,8),%ecx
	movzbl	%bl,%esi
	movl	12(%esp),%ebx
	xorl	%eax,%edx
	xorl	2052(%ebp,%esi,8),%ecx
	movl	56(%edi),%esi
	xorl	%ecx,%edx
	movl	%edx,16(%esp)
	xorl	%ebx,%ecx
	movl	%ecx,12(%esp)
	xorl	%esi,%ecx
	xorl	60(%edi),%edx
	movzbl	%ch,%esi
	movl	2052(%ebp,%esi,8),%ebx
	movzbl	%cl,%esi
	xorl	4(%ebp,%esi,8),%ebx
	shrl	$16,%ecx
	movzbl	%dl,%esi
	movl	(%ebp,%esi,8),%eax
	movzbl	%ch,%esi
	xorl	(%ebp,%esi,8),%ebx
	movzbl	%dh,%esi
	xorl	4(%ebp,%esi,8),%eax
	shrl	$16,%edx
	movzbl	%cl,%ecx
	xorl	2048(%ebp,%ecx,8),%ebx
	movzbl	%dh,%esi
	movl	8(%esp),%ecx
	xorl	%ebx,%eax
	rorl	$8,%ebx
	xorl	2048(%ebp,%esi,8),%eax
	movzbl	%dl,%esi
	movl	4(%esp),%edx
	xorl	%ecx,%ebx
	xorl	2052(%ebp,%esi,8),%eax
	movl	64(%edi),%esi
	xorl	%eax,%ebx
	movl	%ebx,8(%esp)
	xorl	%edx,%eax
	movl	%eax,4(%esp)
	addl	$64,%edi
	cmpl	20(%esp),%edi
	je	.L003done
	andl	%eax,%esi
	movl	16(%esp),%edx
	roll	$1,%esi
	movl	%edx,%ecx
	xorl	%esi,%ebx
	orl	12(%edi),%ecx
	movl	%ebx,8(%esp)
	xorl	12(%esp),%ecx
	movl	4(%edi),%esi
	movl	%ecx,12(%esp)
	orl	%ebx,%esi
	andl	8(%edi),%ecx
	xorl	%esi,%eax
	roll	$1,%ecx
	movl	%eax,4(%esp)
	xorl	%ecx,%edx
	movl	16(%edi),%esi
	movl	%edx,16(%esp)
	jmp	.L002loop
.align	8
.L003done:
	movl	%eax,%ecx
	movl	%ebx,%edx
	movl	12(%esp),%eax
	movl	16(%esp),%ebx
	xorl	%esi,%eax
	xorl	4(%edi),%ebx
	xorl	8(%edi),%ecx
	xorl	12(%edi),%edx
	ret
.size	_x86_Camellia_encrypt,.-_x86_Camellia_encrypt
.globl	Camellia_DecryptBlock_Rounds
.type	Camellia_DecryptBlock_Rounds,@function
.align	16
Camellia_DecryptBlock_Rounds:
.L_Camellia_DecryptBlock_Rounds_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%eax
	movl	24(%esp),%esi
	movl	28(%esp),%edi
	movl	%esp,%ebx
	subl	$28,%esp
	andl	$-64,%esp
	leal	-127(%edi),%ecx
	subl	%esp,%ecx
	negl	%ecx
	andl	$960,%ecx
	subl	%ecx,%esp
	addl	$4,%esp
	shll	$6,%eax
	movl	%edi,16(%esp)
	leal	(%edi,%eax,1),%edi
	movl	%ebx,20(%esp)
	call	.L004pic_point
.L004pic_point:
	popl	%ebp
	leal	.LCamellia_SBOX-.L004pic_point(%ebp),%ebp
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	bswap	%eax
	movl	12(%esi),%edx
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	call	_x86_Camellia_decrypt
	movl	20(%esp),%esp
	bswap	%eax
	movl	32(%esp),%esi
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	movl	%eax,(%esi)
	movl	%ebx,4(%esi)
	movl	%ecx,8(%esi)
	movl	%edx,12(%esi)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	Camellia_DecryptBlock_Rounds,.-.L_Camellia_DecryptBlock_Rounds_begin
.globl	Camellia_DecryptBlock
.type	Camellia_DecryptBlock,@function
.align	16
Camellia_DecryptBlock:
.L_Camellia_DecryptBlock_begin:
	movl	$128,%eax
	subl	4(%esp),%eax		/* CF=1 iff keyBitLength > 128 */
	movl	$3,%eax
	adcl	$0,%eax			/* grandRounds = 3 + CF */
	movl	%eax,4(%esp)
	jmp	.L_Camellia_DecryptBlock_Rounds_begin
.size	Camellia_DecryptBlock,.-.L_Camellia_DecryptBlock_begin
.globl	Camellia_decrypt
.type	Camellia_decrypt,@function
.align	16
Camellia_decrypt:
.L_Camellia_decrypt_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	20(%esp),%esi
	movl	28(%esp),%edi
	movl	%esp,%ebx
	subl	$28,%esp
	andl	$-64,%esp
	movl	272(%edi),%eax
	leal	-127(%edi),%ecx
	subl	%esp,%ecx
	negl	%ecx
	andl	$960,%ecx
	subl	%ecx,%esp
	addl	$4,%esp
	shll	$6,%eax
	movl	%edi,16(%esp)
	leal	(%edi,%eax,1),%edi
	movl	%ebx,20(%esp)
	call	.L005pic_point
.L005pic_point:
	popl	%ebp
	leal	.LCamellia_SBOX-.L005pic_point(%ebp),%ebp
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	bswap	%eax
	movl	12(%esi),%edx
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	call	_x86_Camellia_decrypt
	movl	20(%esp),%esp
	bswap	%eax
	movl	24(%esp),%esi
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	movl	%eax,(%esi)
	movl	%ebx,4(%esi)
	movl	%ecx,8(%esi)
	movl	%edx,12(%esi)
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	Camellia_decrypt,.-.L_Camellia_decrypt_begin
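/*
 * _x86_Camellia_decrypt: same register contract as the encrypt core
 * above, except %edi enters pointing past the last round key and the
 * schedule is walked backwards (negative displacements, subl $64).
 */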
.type	_x86_Camellia_decrypt,@function
.align	16
_x86_Camellia_decrypt:
	xorl	(%edi),%eax
	xorl	4(%edi),%ebx
	xorl	8(%edi),%ecx
	xorl	12(%edi),%edx
	movl	-8(%edi),%esi
	movl	%eax,4(%esp)
	movl	%ebx,8(%esp)
	movl	%ecx,12(%esp)
	movl	%edx,16(%esp)
.align	16
.L006loop:
	xorl	%esi,%eax
	xorl	-4(%edi),%ebx
	movzbl	%ah,%esi
	movl	2052(%ebp,%esi,8),%edx
	movzbl	%al,%esi
	xorl	4(%ebp,%esi,8),%edx
	shrl	$16,%eax
	movzbl	%bl,%esi
	movl	(%ebp,%esi,8),%ecx
	movzbl	%ah,%esi
	xorl	(%ebp,%esi,8),%edx
	movzbl	%bh,%esi
	xorl	4(%ebp,%esi,8),%ecx
	shrl	$16,%ebx
	movzbl	%al,%eax
	xorl	2048(%ebp,%eax,8),%edx
	movzbl	%bh,%esi
	movl	16(%esp),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	2048(%ebp,%esi,8),%ecx
	movzbl	%bl,%esi
	movl	12(%esp),%ebx
	xorl	%eax,%edx
	xorl	2052(%ebp,%esi,8),%ecx
	movl	-16(%edi),%esi
	xorl	%ecx,%edx
	movl	%edx,16(%esp)
	xorl	%ebx,%ecx
	movl	%ecx,12(%esp)
	xorl	%esi,%ecx
	xorl	-12(%edi),%edx
	movzbl	%ch,%esi
	movl	2052(%ebp,%esi,8),%ebx
	movzbl	%cl,%esi
	xorl	4(%ebp,%esi,8),%ebx
	shrl	$16,%ecx
	movzbl	%dl,%esi
	movl	(%ebp,%esi,8),%eax
	movzbl	%ch,%esi
	xorl	(%ebp,%esi,8),%ebx
	movzbl	%dh,%esi
	xorl	4(%ebp,%esi,8),%eax
	shrl	$16,%edx
	movzbl	%cl,%ecx
	xorl	2048(%ebp,%ecx,8),%ebx
	movzbl	%dh,%esi
	movl	8(%esp),%ecx
	xorl	%ebx,%eax
	rorl	$8,%ebx
	xorl	2048(%ebp,%esi,8),%eax
	movzbl	%dl,%esi
	movl	4(%esp),%edx
	xorl	%ecx,%ebx
	xorl	2052(%ebp,%esi,8),%eax
	movl	-24(%edi),%esi
	xorl	%eax,%ebx
	movl	%ebx,8(%esp)
	xorl	%edx,%eax
	movl	%eax,4(%esp)
	xorl	%esi,%eax
	xorl	-20(%edi),%ebx
	movzbl	%ah,%esi
	movl	2052(%ebp,%esi,8),%edx
	movzbl	%al,%esi
	xorl	4(%ebp,%esi,8),%edx
	shrl	$16,%eax
	movzbl	%bl,%esi
	movl	(%ebp,%esi,8),%ecx
	movzbl	%ah,%esi
	xorl	(%ebp,%esi,8),%edx
	movzbl	%bh,%esi
	xorl	4(%ebp,%esi,8),%ecx
	shrl	$16,%ebx
	movzbl	%al,%eax
	xorl	2048(%ebp,%eax,8),%edx
	movzbl	%bh,%esi
	movl	16(%esp),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	2048(%ebp,%esi,8),%ecx
	movzbl	%bl,%esi
	movl	12(%esp),%ebx
	xorl	%eax,%edx
	xorl	2052(%ebp,%esi,8),%ecx
	movl	-32(%edi),%esi
	xorl	%ecx,%edx
	movl	%edx,16(%esp)
	xorl	%ebx,%ecx
	movl	%ecx,12(%esp)
	xorl	%esi,%ecx
	xorl	-28(%edi),%edx
	movzbl	%ch,%esi
	movl	2052(%ebp,%esi,8),%ebx
	movzbl	%cl,%esi
	xorl	4(%ebp,%esi,8),%ebx
	shrl	$16,%ecx
	movzbl	%dl,%esi
	movl	(%ebp,%esi,8),%eax
	movzbl	%ch,%esi
	xorl	(%ebp,%esi,8),%ebx
	movzbl	%dh,%esi
	xorl	4(%ebp,%esi,8),%eax
	shrl	$16,%edx
	movzbl	%cl,%ecx
	xorl	2048(%ebp,%ecx,8),%ebx
	movzbl	%dh,%esi
	movl	8(%esp),%ecx
	xorl	%ebx,%eax
	rorl	$8,%ebx
	xorl	2048(%ebp,%esi,8),%eax
	movzbl	%dl,%esi
	movl	4(%esp),%edx
	xorl	%ecx,%ebx
	xorl	2052(%ebp,%esi,8),%eax
	movl	-40(%edi),%esi
	xorl	%eax,%ebx
	movl	%ebx,8(%esp)
	xorl	%edx,%eax
	movl	%eax,4(%esp)
	xorl	%esi,%eax
	xorl	-36(%edi),%ebx
	movzbl	%ah,%esi
	movl	2052(%ebp,%esi,8),%edx
	movzbl	%al,%esi
	xorl	4(%ebp,%esi,8),%edx
	shrl	$16,%eax
	movzbl	%bl,%esi
	movl	(%ebp,%esi,8),%ecx
	movzbl	%ah,%esi
	xorl	(%ebp,%esi,8),%edx
	movzbl	%bh,%esi
	xorl	4(%ebp,%esi,8),%ecx
	shrl	$16,%ebx
	movzbl	%al,%eax
	xorl	2048(%ebp,%eax,8),%edx
	movzbl	%bh,%esi
	movl	16(%esp),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	2048(%ebp,%esi,8),%ecx
	movzbl	%bl,%esi
	movl	12(%esp),%ebx
	xorl	%eax,%edx
	xorl	2052(%ebp,%esi,8),%ecx
	movl	-48(%edi),%esi
	xorl	%ecx,%edx
	movl	%edx,16(%esp)
	xorl	%ebx,%ecx
	movl	%ecx,12(%esp)
	xorl	%esi,%ecx
	xorl	-44(%edi),%edx
	movzbl	%ch,%esi
	movl	2052(%ebp,%esi,8),%ebx
	movzbl	%cl,%esi
	xorl	4(%ebp,%esi,8),%ebx
	shrl	$16,%ecx
	movzbl	%dl,%esi
	movl	(%ebp,%esi,8),%eax
	movzbl	%ch,%esi
	xorl	(%ebp,%esi,8),%ebx
	movzbl	%dh,%esi
	xorl	4(%ebp,%esi,8),%eax
	shrl	$16,%edx
	movzbl	%cl,%ecx
	xorl	2048(%ebp,%ecx,8),%ebx
	movzbl	%dh,%esi
	movl	8(%esp),%ecx
	xorl	%ebx,%eax
	rorl	$8,%ebx
	xorl	2048(%ebp,%esi,8),%eax
	movzbl	%dl,%esi
	movl	4(%esp),%edx
	xorl	%ecx,%ebx
	xorl	2052(%ebp,%esi,8),%eax
	movl	-56(%edi),%esi
	xorl	%eax,%ebx
	movl	%ebx,8(%esp)
	xorl	%edx,%eax
	movl	%eax,4(%esp)
	subl	$64,%edi
	cmpl	20(%esp),%edi
	je	.L007done
	andl	%eax,%esi
	movl	16(%esp),%edx
	roll	$1,%esi
	movl	%edx,%ecx
	xorl	%esi,%ebx
	orl	4(%edi),%ecx
	movl	%ebx,8(%esp)
	xorl	12(%esp),%ecx
	movl	12(%edi),%esi
	movl	%ecx,12(%esp)
	orl	%ebx,%esi
	andl	(%edi),%ecx
	xorl	%esi,%eax
	roll	$1,%ecx
	movl	%eax,4(%esp)
	xorl	%ecx,%edx
	movl	-8(%edi),%esi
	movl	%edx,16(%esp)
	jmp	.L006loop
.align	8
.L007done:
	movl	%eax,%ecx
	movl	%ebx,%edx
	movl	12(%esp),%eax
	movl	16(%esp),%ebx
	xorl	%esi,%ecx
	xorl	12(%edi),%edx
	xorl	(%edi),%eax
	xorl	4(%edi),%ebx
	ret
.size	_x86_Camellia_decrypt,.-_x86_Camellia_decrypt
.globl	Camellia_Ekeygen
.type	Camellia_Ekeygen,@function
.align	16
Camellia_Ekeygen:
.L_Camellia_Ekeygen_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	subl	$16,%esp
	movl	36(%esp),%ebp
	movl	40(%esp),%esi
	movl	44(%esp),%edi
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	movl	12(%esi),%edx
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	movl	%eax,(%edi)
	movl	%ebx,4(%edi)
	movl	%ecx,8(%edi)
	movl	%edx,12(%edi)
	cmpl	$128,%ebp
	je	.L0081st128
	movl	16(%esi),%eax
	movl	20(%esi),%ebx
	cmpl	$192,%ebp
	je	.L0091st192
	movl	24(%esi),%ecx
	movl	28(%esi),%edx
	jmp	.L0101st256
.align	4
.L0091st192:
	movl	%eax,%ecx
	movl	%ebx,%edx
	notl	%ecx
	notl	%edx
.align	4
.L0101st256:
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	bswap	%edx
	movl	%eax,32(%edi)
	movl	%ebx,36(%edi)
	movl	%ecx,40(%edi)
	movl	%edx,44(%edi)
	xorl	(%edi),%eax
	xorl	4(%edi),%ebx
	xorl	8(%edi),%ecx
	xorl	12(%edi),%edx
.align	4
.L0081st128:
	call	.L011pic_point
.L011pic_point:
	popl	%ebp
	leal	.LCamellia_SBOX-.L011pic_point(%ebp),%ebp
	leal	.LCamellia_SIGMA-.LCamellia_SBOX(%ebp),%edi
	movl	(%edi),%esi
	movl	%eax,(%esp)
	movl	%ebx,4(%esp)
	movl	%ecx,8(%esp)
	movl	%edx,12(%esp)
	xorl	%esi,%eax
	xorl	4(%edi),%ebx
	movzbl	%ah,%esi
	movl	2052(%ebp,%esi,8),%edx
	movzbl	%al,%esi
	xorl	4(%ebp,%esi,8),%edx
	shrl	$16,%eax
	movzbl	%bl,%esi
	movl	(%ebp,%esi,8),%ecx
	movzbl	%ah,%esi
	xorl	(%ebp,%esi,8),%edx
	movzbl	%bh,%esi
	xorl	4(%ebp,%esi,8),%ecx
	shrl	$16,%ebx
	movzbl	%al,%eax
	xorl	2048(%ebp,%eax,8),%edx
	movzbl	%bh,%esi
	movl	12(%esp),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	2048(%ebp,%esi,8),%ecx
	movzbl	%bl,%esi
	movl	8(%esp),%ebx
	xorl	%eax,%edx
	xorl	2052(%ebp,%esi,8),%ecx
	movl	8(%edi),%esi
	xorl	%ecx,%edx
	movl	%edx,12(%esp)
	xorl	%ebx,%ecx
	movl	%ecx,8(%esp)
	xorl	%esi,%ecx
	xorl	12(%edi),%edx
	movzbl	%ch,%esi
	movl	2052(%ebp,%esi,8),%ebx
	movzbl	%cl,%esi
	xorl	4(%ebp,%esi,8),%ebx
	shrl	$16,%ecx
	movzbl	%dl,%esi
	movl	(%ebp,%esi,8),%eax
	movzbl	%ch,%esi
	xorl	(%ebp,%esi,8),%ebx
	movzbl	%dh,%esi
	xorl	4(%ebp,%esi,8),%eax
	shrl	$16,%edx
	movzbl	%cl,%ecx
	xorl	2048(%ebp,%ecx,8),%ebx
	movzbl	%dh,%esi
	movl	4(%esp),%ecx
	xorl	%ebx,%eax
	rorl	$8,%ebx
	xorl	2048(%ebp,%esi,8),%eax
	movzbl	%dl,%esi
	movl	(%esp),%edx
	xorl	%ecx,%ebx
	xorl	2052(%ebp,%esi,8),%eax
	movl	16(%edi),%esi
	xorl	%eax,%ebx
	movl	%ebx,4(%esp)
	xorl	%edx,%eax
	movl	%eax,(%esp)
	movl	8(%esp),%ecx
	movl	12(%esp),%edx
	movl	44(%esp),%esi
	xorl	(%esi),%eax
	xorl	4(%esi),%ebx
	xorl	8(%esi),%ecx
	xorl	12(%esi),%edx
	movl	16(%edi),%esi
	movl	%eax,(%esp)
	movl	%ebx,4(%esp)
	movl	%ecx,8(%esp)
	movl	%edx,12(%esp)
	xorl	%esi,%eax
	xorl	20(%edi),%ebx
	movzbl	%ah,%esi
	movl	2052(%ebp,%esi,8),%edx
	movzbl	%al,%esi
	xorl	4(%ebp,%esi,8),%edx
	shrl	$16,%eax
	movzbl	%bl,%esi
	movl	(%ebp,%esi,8),%ecx
	movzbl	%ah,%esi
	xorl	(%ebp,%esi,8),%edx
	movzbl	%bh,%esi
	xorl	4(%ebp,%esi,8),%ecx
	shrl	$16,%ebx
	movzbl	%al,%eax
	xorl	2048(%ebp,%eax,8),%edx
	movzbl	%bh,%esi
	movl	12(%esp),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	2048(%ebp,%esi,8),%ecx
	movzbl	%bl,%esi
	movl	8(%esp),%ebx
	xorl	%eax,%edx
	xorl	2052(%ebp,%esi,8),%ecx
	movl	24(%edi),%esi
	xorl	%ecx,%edx
	movl	%edx,12(%esp)
	xorl	%ebx,%ecx
	movl	%ecx,8(%esp)
	xorl	%esi,%ecx
	xorl	28(%edi),%edx
	movzbl	%ch,%esi
	movl	2052(%ebp,%esi,8),%ebx
	movzbl	%cl,%esi
	xorl	4(%ebp,%esi,8),%ebx
	shrl	$16,%ecx
	movzbl	%dl,%esi
	movl	(%ebp,%esi,8),%eax
	movzbl	%ch,%esi
	xorl	(%ebp,%esi,8),%ebx
	movzbl	%dh,%esi
	xorl	4(%ebp,%esi,8),%eax
	shrl	$16,%edx
	movzbl	%cl,%ecx
	xorl	2048(%ebp,%ecx,8),%ebx
	movzbl	%dh,%esi
	movl	4(%esp),%ecx
	xorl	%ebx,%eax
	rorl	$8,%ebx
	xorl	2048(%ebp,%esi,8),%eax
	movzbl	%dl,%esi
	movl	(%esp),%edx
	xorl	%ecx,%ebx
	xorl	2052(%ebp,%esi,8),%eax
	movl	32(%edi),%esi
	xorl	%eax,%ebx
	movl	%ebx,4(%esp)
	xorl	%edx,%eax
	movl	%eax,(%esp)
	movl	8(%esp),%ecx
	movl	12(%esp),%edx
	movl	36(%esp),%esi
	cmpl	$128,%esi
	jne	.L0122nd256
	movl	44(%esp),%edi
	leal	128(%edi),%edi
	movl	%eax,-112(%edi)
	movl	%ebx,-108(%edi)
	movl	%ecx,-104(%edi)
	movl	%edx,-100(%edi)
	movl	%eax,%ebp
	shll	$15,%eax
	movl	%ebx,%esi
	shrl	$17,%esi
	shll	$15,%ebx
	orl	%esi,%eax
	movl	%ecx,%esi
	shll	$15,%ecx
	movl	%eax,-80(%edi)
	shrl	$17,%esi
	orl	%esi,%ebx
	shrl	$17,%ebp
	movl	%edx,%esi
	shrl	$17,%esi
	movl	%ebx,-76(%edi)
	shll	$15,%edx
	orl	%esi,%ecx
	orl	%ebp,%edx
	movl	%ecx,-72(%edi)
	movl	%edx,-68(%edi)
	movl	%eax,%ebp
	shll	$15,%eax
	movl	%ebx,%esi
	shrl	$17,%esi
	shll	$15,%ebx
	orl	%esi,%eax
	movl	%ecx,%esi
	shll	$15,%ecx
	movl	%eax,-64(%edi)
	shrl	$17,%esi
	orl	%esi,%ebx
	shrl	$17,%ebp
	movl	%edx,%esi
	shrl	$17,%esi
	movl	%ebx,-60(%edi)
	shll	$15,%edx
	orl	%esi,%ecx
	orl	%ebp,%edx
	movl	%ecx,-56(%edi)
	movl	%edx,-52(%edi)
	movl	%eax,%ebp
	shll	$15,%eax
	movl	%ebx,%esi
	shrl	$17,%esi
	shll	$15,%ebx
	orl	%esi,%eax
	movl	%ecx,%esi
	shll	$15,%ecx
	movl	%eax,-32(%edi)
	shrl	$17,%esi
	orl	%esi,%ebx
	shrl	$17,%ebp
	movl	%edx,%esi
	shrl	$17,%esi
	movl	%ebx,-28(%edi)
	shll	$15,%edx
	orl	%esi,%ecx
	orl	%ebp,%edx
	movl	%eax,%ebp
	shll	$15,%eax
	movl	%ebx,%esi
	shrl	$17,%esi
	shll	$15,%ebx
	orl	%esi,%eax
	movl	%ecx,%esi
	shll	$15,%ecx
	movl	%eax,-16(%edi)
	shrl	$17,%esi
	orl	%esi,%ebx
	shrl	$17,%ebp
	movl	%edx,%esi
	shrl	$17,%esi
	movl	%ebx,-12(%edi)
	shll	$15,%edx
	orl	%esi,%ecx
	orl	%ebp,%edx
	movl	%ecx,-8(%edi)
	movl	%edx,-4(%edi)
	movl	%ebx,%ebp
	shll	$2,%ebx
	movl	%ecx,%esi
	shrl	$30,%esi
	shll	$2,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$2,%edx
	movl	%ebx,32(%edi)
	shrl	$30,%esi
	orl	%esi,%ecx
	shrl	$30,%ebp
	movl	%eax,%esi
	shrl	$30,%esi
	movl	%ecx,36(%edi)
	shll	$2,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,40(%edi)
	movl	%eax,44(%edi)
	movl	%ebx,%ebp
	shll	$17,%ebx
	movl	%ecx,%esi
	shrl	$15,%esi
	shll	$17,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$17,%edx
	movl	%ebx,64(%edi)
	shrl	$15,%esi
	orl	%esi,%ecx
	shrl	$15,%ebp
	movl	%eax,%esi
	shrl	$15,%esi
	movl	%ecx,68(%edi)
	shll	$17,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,72(%edi)
	movl	%eax,76(%edi)
	movl	-128(%edi),%ebx
	movl	-124(%edi),%ecx
	movl	-120(%edi),%edx
	movl	-116(%edi),%eax
	movl	%ebx,%ebp
	shll	$15,%ebx
	movl	%ecx,%esi
	shrl	$17,%esi
	shll	$15,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$15,%edx
	movl	%ebx,-96(%edi)
	shrl	$17,%esi
	orl	%esi,%ecx
	shrl	$17,%ebp
	movl	%eax,%esi
	shrl	$17,%esi
	movl	%ecx,-92(%edi)
	shll	$15,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,-88(%edi)
	movl	%eax,-84(%edi)
	movl	%ebx,%ebp
	shll	$30,%ebx
	movl	%ecx,%esi
	shrl	$2,%esi
	shll	$30,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$30,%edx
	movl	%ebx,-48(%edi)
	shrl	$2,%esi
	orl	%esi,%ecx
	shrl	$2,%ebp
	movl	%eax,%esi
	shrl	$2,%esi
	movl	%ecx,-44(%edi)
	shll	$30,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,-40(%edi)
	movl	%eax,-36(%edi)
	movl	%ebx,%ebp
	shll	$15,%ebx
	movl	%ecx,%esi
	shrl	$17,%esi
	shll	$15,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$15,%edx
	shrl	$17,%esi
	orl	%esi,%ecx
	shrl	$17,%ebp
	movl	%eax,%esi
	shrl	$17,%esi
	shll	$15,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,-24(%edi)
	movl	%eax,-20(%edi)
	movl	%ebx,%ebp
	shll	$17,%ebx
	movl	%ecx,%esi
	shrl	$15,%esi
	shll	$17,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$17,%edx
	movl	%ebx,(%edi)
	shrl	$15,%esi
	orl	%esi,%ecx
	shrl	$15,%ebp
	movl	%eax,%esi
	shrl	$15,%esi
	movl	%ecx,4(%edi)
	shll	$17,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,8(%edi)
	movl	%eax,12(%edi)
	movl	%ebx,%ebp
	shll	$17,%ebx
	movl	%ecx,%esi
	shrl	$15,%esi
	shll	$17,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$17,%edx
	movl	%ebx,16(%edi)
	shrl	$15,%esi
	orl	%esi,%ecx
	shrl	$15,%ebp
	movl	%eax,%esi
	shrl	$15,%esi
	movl	%ecx,20(%edi)
	shll	$17,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,24(%edi)
	movl	%eax,28(%edi)
	movl	%ebx,%ebp
	shll	$17,%ebx
	movl	%ecx,%esi
	shrl	$15,%esi
	shll	$17,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$17,%edx
	movl	%ebx,48(%edi)
	shrl	$15,%esi
	orl	%esi,%ecx
	shrl	$15,%ebp
	movl	%eax,%esi
	shrl	$15,%esi
	movl	%ecx,52(%edi)
	shll	$17,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,56(%edi)
	movl	%eax,60(%edi)
	movl	$3,%eax
	jmp	.L013done
.align	16
.L0122nd256:
	movl	44(%esp),%esi
	movl	%eax,48(%esi)
	movl	%ebx,52(%esi)
	movl	%ecx,56(%esi)
	movl	%edx,60(%esi)
	xorl	32(%esi),%eax
	xorl	36(%esi),%ebx
	xorl	40(%esi),%ecx
	xorl	44(%esi),%edx
	movl	32(%edi),%esi
	movl	%eax,(%esp)
	movl	%ebx,4(%esp)
	movl	%ecx,8(%esp)
	movl	%edx,12(%esp)
	xorl	%esi,%eax
	xorl	36(%edi),%ebx
	movzbl	%ah,%esi
	movl	2052(%ebp,%esi,8),%edx
	movzbl	%al,%esi
	xorl	4(%ebp,%esi,8),%edx
	shrl	$16,%eax
	movzbl	%bl,%esi
	movl	(%ebp,%esi,8),%ecx
	movzbl	%ah,%esi
	xorl	(%ebp,%esi,8),%edx
	movzbl	%bh,%esi
	xorl	4(%ebp,%esi,8),%ecx
	shrl	$16,%ebx
	movzbl	%al,%eax
	xorl	2048(%ebp,%eax,8),%edx
	movzbl	%bh,%esi
	movl	12(%esp),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	2048(%ebp,%esi,8),%ecx
	movzbl	%bl,%esi
	movl	8(%esp),%ebx
	xorl	%eax,%edx
	xorl	2052(%ebp,%esi,8),%ecx
	movl	40(%edi),%esi
	xorl	%ecx,%edx
	movl	%edx,12(%esp)
	xorl	%ebx,%ecx
	movl	%ecx,8(%esp)
	xorl	%esi,%ecx
	xorl	44(%edi),%edx
	movzbl	%ch,%esi
	movl	2052(%ebp,%esi,8),%ebx
	movzbl	%cl,%esi
	xorl	4(%ebp,%esi,8),%ebx
	shrl	$16,%ecx
	movzbl	%dl,%esi
	movl	(%ebp,%esi,8),%eax
	movzbl	%ch,%esi
	xorl	(%ebp,%esi,8),%ebx
	movzbl	%dh,%esi
	xorl	4(%ebp,%esi,8),%eax
	shrl	$16,%edx
	movzbl	%cl,%ecx
	xorl	2048(%ebp,%ecx,8),%ebx
	movzbl	%dh,%esi
	movl	4(%esp),%ecx
	xorl	%ebx,%eax
	rorl	$8,%ebx
	xorl	2048(%ebp,%esi,8),%eax
	movzbl	%dl,%esi
	movl	(%esp),%edx
	xorl	%ecx,%ebx
	xorl	2052(%ebp,%esi,8),%eax
	movl	48(%edi),%esi
	xorl	%eax,%ebx
	movl	%ebx,4(%esp)
	xorl	%edx,%eax
	movl	%eax,(%esp)
	movl	8(%esp),%ecx
	movl	12(%esp),%edx
	movl	44(%esp),%edi
	leal	128(%edi),%edi
	movl	%eax,-112(%edi)
	movl	%ebx,-108(%edi)
	movl	%ecx,-104(%edi)
	movl	%edx,-100(%edi)
	movl	%eax,%ebp
	shll	$30,%eax
	movl	%ebx,%esi
	shrl	$2,%esi
	shll	$30,%ebx
	orl	%esi,%eax
	movl	%ecx,%esi
	shll	$30,%ecx
	movl	%eax,-48(%edi)
	shrl	$2,%esi
	orl	%esi,%ebx
	shrl	$2,%ebp
	movl	%edx,%esi
	shrl	$2,%esi
	movl	%ebx,-44(%edi)
	shll	$30,%edx
	orl	%esi,%ecx
	orl	%ebp,%edx
	movl	%ecx,-40(%edi)
	movl	%edx,-36(%edi)
	movl	%eax,%ebp
	shll	$30,%eax
	movl	%ebx,%esi
	shrl	$2,%esi
	shll	$30,%ebx
	orl	%esi,%eax
	movl	%ecx,%esi
	shll	$30,%ecx
	movl	%eax,32(%edi)
	shrl	$2,%esi
	orl	%esi,%ebx
	shrl	$2,%ebp
	movl	%edx,%esi
	shrl	$2,%esi
	movl	%ebx,36(%edi)
	shll	$30,%edx
	orl	%esi,%ecx
	orl	%ebp,%edx
	movl	%ecx,40(%edi)
	movl	%edx,44(%edi)
	movl	%ebx,%ebp
	shll	$19,%ebx
	movl	%ecx,%esi
	shrl	$13,%esi
	shll	$19,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$19,%edx
	movl	%ebx,128(%edi)
	shrl	$13,%esi
	orl	%esi,%ecx
	shrl	$13,%ebp
	movl	%eax,%esi
	shrl	$13,%esi
	movl	%ecx,132(%edi)
	shll	$19,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,136(%edi)
	movl	%eax,140(%edi)
	movl	-96(%edi),%ebx
	movl	-92(%edi),%ecx
	movl	-88(%edi),%edx
	movl	-84(%edi),%eax
	movl	%ebx,%ebp
	shll	$15,%ebx
	movl	%ecx,%esi
	shrl	$17,%esi
	shll	$15,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$15,%edx
	movl	%ebx,-96(%edi)
	shrl	$17,%esi
	orl	%esi,%ecx
	shrl	$17,%ebp
	movl	%eax,%esi
	shrl	$17,%esi
	movl	%ecx,-92(%edi)
	shll	$15,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,-88(%edi)
	movl	%eax,-84(%edi)
	movl	%ebx,%ebp
	shll	$15,%ebx
	movl	%ecx,%esi
	shrl	$17,%esi
	shll	$15,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$15,%edx
	movl	%ebx,-64(%edi)
	shrl	$17,%esi
	orl	%esi,%ecx
	shrl	$17,%ebp
	movl	%eax,%esi
	shrl	$17,%esi
	movl	%ecx,-60(%edi)
	shll	$15,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,-56(%edi)
	movl	%eax,-52(%edi)
	movl	%ebx,%ebp
	shll	$30,%ebx
	movl	%ecx,%esi
	shrl	$2,%esi
	shll	$30,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$30,%edx
	movl	%ebx,16(%edi)
	shrl	$2,%esi
	orl	%esi,%ecx
	shrl	$2,%ebp
	movl	%eax,%esi
	shrl	$2,%esi
	movl	%ecx,20(%edi)
	shll	$30,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,24(%edi)
	movl	%eax,28(%edi)
	movl	%ecx,%ebp
	shll	$2,%ecx
	movl	%edx,%esi
	shrl	$30,%esi
	shll	$2,%edx
	orl	%esi,%ecx
	movl	%eax,%esi
	shll	$2,%eax
	movl	%ecx,80(%edi)
	shrl	$30,%esi
	orl	%esi,%edx
	shrl	$30,%ebp
	movl	%ebx,%esi
	shrl	$30,%esi
	movl	%edx,84(%edi)
	shll	$2,%ebx
	orl	%esi,%eax
	orl	%ebp,%ebx
	movl	%eax,88(%edi)
	movl	%ebx,92(%edi)
	movl	-80(%edi),%ecx
	movl	-76(%edi),%edx
	movl	-72(%edi),%eax
	movl	-68(%edi),%ebx
	movl	%ecx,%ebp
	shll	$15,%ecx
	movl	%edx,%esi
	shrl	$17,%esi
	shll	$15,%edx
	orl	%esi,%ecx
	movl	%eax,%esi
	shll	$15,%eax
	movl	%ecx,-80(%edi)
	shrl	$17,%esi
	orl	%esi,%edx
	shrl	$17,%ebp
	movl	%ebx,%esi
	shrl	$17,%esi
	movl	%edx,-76(%edi)
	shll	$15,%ebx
	orl	%esi,%eax
	orl	%ebp,%ebx
	movl	%eax,-72(%edi)
	movl	%ebx,-68(%edi)
	movl	%ecx,%ebp
	shll	$30,%ecx
	movl	%edx,%esi
	shrl	$2,%esi
	shll	$30,%edx
	orl	%esi,%ecx
	movl	%eax,%esi
	shll	$30,%eax
	movl	%ecx,-16(%edi)
	shrl	$2,%esi
	orl	%esi,%edx
	shrl	$2,%ebp
	movl	%ebx,%esi
	shrl	$2,%esi
	movl	%edx,-12(%edi)
	shll	$30,%ebx
	orl	%esi,%eax
	orl	%ebp,%ebx
	movl	%eax,-8(%edi)
	movl	%ebx,-4(%edi)
	movl	%edx,64(%edi)
	movl	%eax,68(%edi)
	movl	%ebx,72(%edi)
	movl	%ecx,76(%edi)
	movl	%edx,%ebp
	shll	$17,%edx
	movl	%eax,%esi
	shrl	$15,%esi
	shll	$17,%eax
	orl	%esi,%edx
	movl	%ebx,%esi
	shll	$17,%ebx
	movl	%edx,96(%edi)
	shrl	$15,%esi
	orl	%esi,%eax
	shrl	$15,%ebp
	movl	%ecx,%esi
	shrl	$15,%esi
	movl	%eax,100(%edi)
	shll	$17,%ecx
	orl	%esi,%ebx
	orl	%ebp,%ecx
	movl	%ebx,104(%edi)
	movl	%ecx,108(%edi)
	movl	-128(%edi),%edx
	movl	-124(%edi),%eax
	movl	-120(%edi),%ebx
	movl	-116(%edi),%ecx
	movl	%eax,%ebp
	shll	$13,%eax
	movl	%ebx,%esi
	shrl	$19,%esi
	shll	$13,%ebx
	orl	%esi,%eax
	movl	%ecx,%esi
	shll	$13,%ecx
	movl	%eax,-32(%edi)
	shrl	$19,%esi
	orl	%esi,%ebx
	shrl	$19,%ebp
	movl	%edx,%esi
	shrl	$19,%esi
	movl	%ebx,-28(%edi)
	shll	$13,%edx
	orl	%esi,%ecx
	orl	%ebp,%edx
	movl	%ecx,-24(%edi)
	movl	%edx,-20(%edi)
	movl	%eax,%ebp
	shll	$15,%eax
	movl	%ebx,%esi
	shrl	$17,%esi
	shll	$15,%ebx
	orl	%esi,%eax
	movl	%ecx,%esi
	shll	$15,%ecx
	movl	%eax,(%edi)
	shrl	$17,%esi
	orl	%esi,%ebx
	shrl	$17,%ebp
	movl	%edx,%esi
	shrl	$17,%esi
	movl	%ebx,4(%edi)
	shll	$15,%edx
	orl	%esi,%ecx
	orl	%ebp,%edx
	movl	%ecx,8(%edi)
	movl	%edx,12(%edi)
	movl	%eax,%ebp
	shll	$17,%eax
	movl	%ebx,%esi
	shrl	$15,%esi
	shll	$17,%ebx
	orl	%esi,%eax
	movl	%ecx,%esi
	shll	$17,%ecx
	movl	%eax,48(%edi)
	shrl	$15,%esi
	orl	%esi,%ebx
	shrl	$15,%ebp
	movl	%edx,%esi
	shrl	$15,%esi
	movl	%ebx,52(%edi)
	shll	$17,%edx
	orl	%esi,%ecx
	orl	%ebp,%edx
	movl	%ecx,56(%edi)
	movl	%edx,60(%edi)
	movl	%ebx,%ebp
	shll	$2,%ebx
	movl	%ecx,%esi
	shrl	$30,%esi
	shll	$2,%ecx
	orl	%esi,%ebx
	movl	%edx,%esi
	shll	$2,%edx
	movl	%ebx,112(%edi)
	shrl	$30,%esi
	orl	%esi,%ecx
	shrl	$30,%ebp
	movl	%eax,%esi
	shrl	$30,%esi
	movl	%ecx,116(%edi)
	shll	$2,%eax
	orl	%esi,%edx
	orl	%ebp,%eax
	movl	%edx,120(%edi)
	movl	%eax,124(%edi)
	movl	$4,%eax
.L013done:
	leal	144(%edi),%edx
	addl	$16,%esp
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	Camellia_Ekeygen,.-.L_Camellia_Ekeygen_begin
.globl	Camellia_set_key
.type	Camellia_set_key,@function
.align	16
Camellia_set_key:
.L_Camellia_set_key_begin:
	pushl	%ebx
	movl	8(%esp),%ecx
	movl	12(%esp),%ebx
	movl	16(%esp),%edx
	movl	$-1,%eax
	testl	%ecx,%ecx
	jz	.L014done
	testl	%edx,%edx
	jz	.L014done
	movl	$-2,%eax
	cmpl	$256,%ebx
	je	.L015arg_ok
	cmpl	$192,%ebx
	je	.L015arg_ok
	cmpl	$128,%ebx
	jne	.L014done
.align	4
.L015arg_ok:
	pushl	%edx
	pushl	%ecx
	pushl	%ebx
	call	.L_Camellia_Ekeygen_begin
	addl	$12,%esp
	movl	%eax,(%edx)
	xorl	%eax,%eax
.align	4
.L014done:
	popl	%ebx
	ret
.size	Camellia_set_key,.-.L_Camellia_set_key_begin
.align	64
.LCamellia_SIGMA:
.long	2694735487,1003262091,3061508184,1286239154,3337565999,3914302142,1426019237,4057165596,283453434,3731369245,2958461122,3018244605,0,0,0,0
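/*
 * .LCamellia_SBOX below packs the S-boxes as 512 pairs of 32-bit
 * words: two 2KB tables, indexed throughout the code as
 * (%ebp,%index,8) with displacements 0/4 and 2048/2052, each entry
 * holding a pre-rotated S-box output for the F-function.
 */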
.align	64
.LCamellia_SBOX:
.long	1886416896,1886388336
.long	2189591040,741081132
.long	741092352,3014852787
.long	3974949888,3233808576
.long	3014898432,3840147684
.long	656877312,1465319511
.long	3233857536,3941204202
.long	3857048832,2930639022
.long	3840205824,589496355
.long	2240120064,1802174571
.long	1465341696,1162149957
.long	892679424,2779054245
.long	3941263872,3991732461
.long	202116096,1330577487
.long	2930683392,488439837
.long	1094795520,2459041938
.long	589505280,2256928902
.long	4025478912,2947481775
.long	1802201856,2088501372
.long	2475922176,522125343
.long	1162167552,1044250686
.long	421075200,3705405660
.long	2779096320,1583218782
.long	555819264,185270283
.long	3991792896,2795896998
.long	235802112,960036921
.long	1330597632,3587506389
.long	1313754624,1566376029
.long	488447232,3654877401
.long	1701143808,1515847770
.long	2459079168,1364262993
.long	3183328512,1819017324
.long	2256963072,2341142667
.long	3099113472,2593783962
.long	2947526400,4227531003
.long	2408550144,2964324528
.long	2088532992,1953759348
.long	3958106880,724238379
.long	522133248,4042260720
.long	3469659648,2223243396
.long	1044266496,3755933919
.long	808464384,3419078859
.long	3705461760,875823156
.long	1600085760,1987444854
.long	1583242752,1835860077
.long	3318072576,2846425257
.long	185273088,3520135377
.long	437918208,67371012
.long	2795939328,336855060
.long	3789676800,976879674
.long	960051456,3739091166
.long	3402287616,286326801
.long	3587560704,842137650
.long	1195853568,2627469468
.long	1566399744,1397948499
.long	1027423488,4075946226
.long	3654932736,4278059262
.long	16843008,3486449871
.long	1515870720,3284336835
.long	3604403712,2054815866
.long	1364283648,606339108
.long	1448498688,3907518696
.long	1819044864,1616904288
.long	1296911616,1768489065
.long	2341178112,2863268010
.long	218959104,2694840480
.long	2593823232,2711683233
.long	1717986816,1650589794
.long	4227595008,1414791252
.long	3435973632,505282590
.long	2964369408,3772776672
.long	757935360,1684275300
.long	1953788928,269484048
.long	303174144,0
.long	724249344,2745368739
.long	538976256,1970602101
.long	4042321920,2324299914
.long	2981212416,3873833190
.long	2223277056,151584777
.long	2576980224,3722248413
.long	3755990784,2273771655
.long	1280068608,2206400643
.long	3419130624,3452764365
.long	3267543552,2425356432
.long	875836416,1936916595
.long	2122219008,4143317238
.long	1987474944,2644312221
.long	84215040,3216965823
.long	1835887872,1381105746
.long	3082270464,3638034648
.long	2846468352,3368550600
.long	825307392,3334865094
.long	3520188672,2172715137
.long	387389184,1869545583
.long	67372032,320012307
.long	3621246720,1667432547
.long	336860160,3924361449
.long	1482184704,2812739751
.long	976894464,2677997727
.long	1633771776,3166437564
.long	3739147776,690552873
.long	454761216,4193845497
.long	286331136,791609391
.long	471604224,3031695540
.long	842150400,2021130360
.long	252645120,101056518
.long	2627509248,3890675943
.long	370546176,1903231089
.long	1397969664,3570663636
.long	404232192,2880110763
.long	4076007936,2290614408
.long	572662272,2374828173
.long	4278124032,1920073842
.long	1145324544,3115909305
.long	3486502656,4177002744
.long	2998055424,2896953516
.long	3284386560,909508662
.long	3048584448,707395626
.long	2054846976,1010565180
.long	2442236160,4059103473
.long	606348288,1077936192
.long	134744064,3553820883
.long	3907577856,3149594811
.long	2829625344,1128464451
.long	1616928768,353697813
.long	4244438016,2913796269
.long	1768515840,2004287607
.long	1347440640,2155872384
.long	2863311360,2189557890
.long	3503345664,3974889708
.long	2694881280,656867367
.long	2105376000,3856990437
.long	2711724288,2240086149
.long	2307492096,892665909
.long	1650614784,202113036
.long	2543294208,1094778945
.long	1414812672,4025417967
.long	1532713728,2475884691
.long	505290240,421068825
.long	2509608192,555810849
.long	3772833792,235798542
.long	4294967040,1313734734
.long	1684300800,1701118053
.long	3537031680,3183280317
.long	269488128,3099066552
.long	3301229568,2408513679
.long	0,3958046955
.long	1212696576,3469607118
.long	2745410304,808452144
.long	4160222976,1600061535
.long	1970631936,3318022341
.long	3688618752,437911578
.long	2324335104,3789619425
.long	50529024,3402236106
.long	3873891840,1195835463
.long	3671775744,1027407933
.long	151587072,16842753
.long	1061109504,3604349142
.long	3722304768,1448476758
.long	2492765184,1296891981
.long	2273806080,218955789
.long	1549556736,1717960806
.long	2206434048,3435921612
.long	33686016,757923885
.long	3452816640,303169554
.long	1246382592,538968096
.long	2425393152,2981167281
.long	858993408,2576941209
.long	1936945920,1280049228
.long	1734829824,3267494082
.long	4143379968,2122186878
.long	4092850944,84213765
.long	2644352256,3082223799
.long	2139062016,825294897
.long	3217014528,387383319
.long	3806519808,3621191895
.long	1381126656,1482162264
.long	2610666240,1633747041
.long	3638089728,454754331
.long	640034304,471597084
.long	3368601600,252641295
.long	926365440,370540566
.long	3334915584,404226072
.long	993737472,572653602
.long	2172748032,1145307204
.long	2526451200,2998010034
.long	1869573888,3048538293
.long	1263225600,2442199185
.long	320017152,134742024
.long	3200171520,2829582504
.long	1667457792,4244373756
.long	774778368,1347420240
.long	3924420864,3503292624
.long	2038003968,2105344125
.long	2812782336,2307457161
.long	2358021120,2543255703
.long	2678038272,1532690523
.long	1852730880,2509570197
.long	3166485504,4294902015
.long	2391707136,3536978130
.long	690563328,3301179588
.long	4126536960,1212678216
.long	4193908992,4160159991
.long	3065427456,3688562907
.long	791621376,50528259
.long	4261281024,3671720154
.long	3031741440,1061093439
.long	1499027712,2492727444
.long	2021160960,1549533276
.long	2560137216,33685506
.long	101058048,1246363722
.long	1785358848,858980403
.long	3890734848,1734803559
.long	1179010560,4092788979
.long	1903259904,2139029631
.long	3132799488,3806462178
.long	3570717696,2610626715
.long	623191296,640024614
.long	2880154368,926351415
.long	1111638528,993722427
.long	2290649088,2526412950
.long	2728567296,1263206475
.long	2374864128,3200123070
.long	4210752000,774766638
.long	1920102912,2037973113
.long	117901056,2357985420
.long	3115956480,1852702830
.long	1431655680,2391670926
.long	4177065984,4126474485
.long	4008635904,3065381046
.long	2896997376,4261216509
.long	168430080,1499005017
.long	909522432,2560098456
.long	1229539584,1785331818
.long	707406336,1178992710
.long	1751672832,3132752058
.long	1010580480,623181861
.long	943208448,1111621698
.long	4059164928,2728525986
.long	2762253312,4210688250
.long	1077952512,117899271
.long	673720320,1431634005
.long	3553874688,4008575214
.long	2071689984,168427530
.long	3149642496,1229520969
.long	3385444608,1751646312
.long	1128481536,943194168
.long	3250700544,2762211492
.long	353703168,673710120
.long	3823362816,2071658619
.long	2913840384,3385393353
.long	4109693952,3250651329
.long	2004317952,3823304931
.long	3351758592,4109631732
.long	2155905024,3351707847
.long	2661195264,2661154974
.long	14737632,939538488
.long	328965,1090535745
.long	5789784,369104406
.long	14277081,1979741814
.long	6776679,3640711641
.long	5131854,2466288531
.long	8487297,1610637408
.long	13355979,4060148466
.long	13224393,1912631922
.long	723723,3254829762
.long	11447982,2868947883
.long	6974058,2583730842
.long	14013909,1962964341
.long	1579032,100664838
.long	6118749,1459640151
.long	8553090,2684395680
.long	4605510,2432733585
.long	14671839,4144035831
.long	14079702,3036722613
.long	2565927,3372272073
.long	9079434,2717950626
.long	3289650,2348846220
.long	4934475,3523269330
.long	4342338,2415956112
.long	14408667,4127258358
.long	1842204,117442311
.long	10395294,2801837991
.long	10263708,654321447
.long	3815994,2382401166
.long	13290186,2986390194
.long	2434341,1224755529
.long	8092539,3724599006
.long	855309,1124090691
.long	7434609,1543527516
.long	6250335,3607156695
.long	2039583,3338717127
.long	16316664,1040203326
.long	14145495,4110480885
.long	4079166,2399178639
.long	10329501,1728079719
.long	8158332,520101663
.long	6316128,402659352
.long	12171705,1845522030
.long	12500670,2936057775
.long	12369084,788541231
.long	9145227,3791708898
.long	1447446,2231403909
.long	3421236,218107149
.long	5066061,1392530259
.long	12829635,4026593520
.long	7500402,2617285788
.long	9803157,1694524773
.long	11250603,3925928682
.long	9342606,2734728099
.long	12237498,2919280302
.long	8026746,2650840734
.long	11776947,3959483628
.long	131586,2147516544
.long	11842740,754986285
.long	11382189,1795189611
.long	10658466,2818615464
.long	11316396,721431339
.long	14211288,905983542
.long	10132122,2785060518
.long	1513239,3305162181
.long	1710618,2248181382
.long	3487029,1291865421
.long	13421772,855651123
.long	16250871,4244700669
.long	10066329,1711302246
.long	6381921,1476417624
.long	5921370,2516620950
.long	15263976,973093434
.long	2368548,150997257
.long	5658198,2499843477
.long	4210752,268439568
.long	14803425,2013296760
.long	6513507,3623934168
.long	592137,1107313218
.long	3355443,3422604492
.long	12566463,4009816047
.long	10000536,637543974
.long	9934743,3842041317
.long	8750469,1627414881
.long	6842472,436214298
.long	16579836,1056980799
.long	15527148,989870907
.long	657930,2181071490
.long	14342874,3053500086
.long	7303023,3674266587
.long	5460819,3556824276
.long	6447714,2550175896
.long	10724259,3892373736
.long	3026478,2332068747
.long	526344,33554946
.long	11513775,3942706155
.long	2631720,167774730
.long	11579568,738208812
.long	7631988,486546717
.long	12763842,2952835248
.long	12434877,1862299503
.long	3552822,2365623693
.long	2236962,2281736328
.long	3684408,234884622
.long	6579300,419436825
.long	1973790,2264958855
.long	3750201,1308642894
.long	2894892,184552203
.long	10921638,2835392937
.long	3158064,201329676
.long	15066597,2030074233
.long	4473924,285217041
.long	16645629,2130739071
.long	8947848,570434082
.long	10461087,3875596263
.long	6645093,1493195097
.long	8882055,3774931425
.long	7039851,3657489114
.long	16053492,1023425853
.long	2302755,3355494600
.long	4737096,301994514
.long	1052688,67109892
.long	13750737,1946186868
.long	5329233,1409307732
.long	12632256,805318704
.long	16382457,2113961598
.long	13816530,3019945140
.long	10526880,671098920
.long	5592405,1426085205
.long	10592673,1744857192
.long	4276545,1342197840
.long	16448250,3187719870
.long	4408131,3489714384
.long	1250067,3288384708
.long	12895428,822096177
.long	3092271,3405827019
.long	11053224,704653866
.long	11974326,2902502829
.long	3947580,251662095
.long	2829099,3389049546
.long	12698049,1879076976
.long	16777215,4278255615
.long	13158600,838873650
.long	10855845,1761634665
.long	2105376,134219784
.long	9013641,1644192354
.long	0,0
.long	9474192,603989028
.long	4671303,3506491857
.long	15724527,4211145723
.long	15395562,3120609978
.long	12040119,3976261101
.long	1381653,1157645637
.long	394758,2164294017
.long	13487565,1929409395
.long	11908533,1828744557
.long	1184274,2214626436
.long	8289918,2667618207
.long	12303291,3993038574
.long	2697513,1241533002
.long	986895,3271607235
.long	12105912,771763758
.long	460551,3238052289
.long	263172,16777473
.long	10197915,3858818790
.long	9737364,620766501
.long	2171169,1207978056
.long	6710886,2566953369
.long	15132390,3103832505
.long	13553358,3003167667
.long	15592941,2063629179
.long	15198183,4177590777
.long	3881787,3456159438
.long	16711422,3204497343
.long	8355711,3741376479
.long	12961221,1895854449
.long	10790052,687876393
.long	3618615,3439381965
.long	11645361,1811967084
.long	5000268,318771987
.long	9539985,1677747300
.long	7237230,2600508315
.long	9276813,1660969827
.long	7763574,2634063261
.long	197379,3221274816
.long	2960685,1258310475
.long	14606046,3070277559
.long	9868950,2768283045
.long	2500134,2298513801
.long	8224125,1593859935
.long	13027014,2969612721
.long	6052956,385881879
.long	13882323,4093703412
.long	15921906,3154164924
.long	5197647,3540046803
.long	1644825,1174423110
.long	4144959,3472936911
.long	14474460,922761015
.long	7960953,1577082462
.long	1907997,1191200583
.long	5395026,2483066004
.long	15461355,4194368250
.long	15987699,4227923196
.long	7171437,1526750043
.long	6184542,2533398423
.long	16514043,4261478142
.long	6908265,1509972570
.long	11711154,2885725356
.long	15790320,1006648380
.long	3223857,1275087948
.long	789516,50332419
.long	13948116,889206069
.long	13619151,4076925939
.long	9211020,587211555
.long	14869218,3087055032
.long	7697781,1560304989
.long	11119017,1778412138
.long	4868682,2449511058
.long	5723991,3573601749
.long	8684676,553656609
.long	1118481,1140868164
.long	4539717,1358975313
.long	1776411,3321939654
.long	16119285,2097184125
.long	15000804,956315961
.long	921102,2197848963
.long	7566195,3691044060
.long	11184810,2852170410
.long	15856113,2080406652
.long	14540253,1996519287
.long	5855577,1442862678
.long	1315860,83887365
.long	7105644,452991771
.long	9605778,2751505572
.long	5526612,352326933
.long	13684944,872428596
.long	7895160,503324190
.long	7368816,469769244
.long	14935011,4160813304
.long	4802889,1375752786
.long	8421504,536879136
.long	5263440,335549460
.long	10987431,3909151209
.long	16185078,3170942397
.long	7829367,3707821533
.long	9671571,3825263844
.long	8816262,2701173153
.long	8618883,3758153952
.long	2763306,2315291274
.long	13092807,4043370993
.long	5987163,3590379222
.long	15329769,2046851706
.long	15658734,3137387451
.long	9408399,3808486371
.long	65793,1073758272
.long	4013373,1325420367
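# (end of the .LCamellia_SBOX lookup tables referenced by the code below)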
.globl	Camellia_cbc_encrypt
.type	Camellia_cbc_encrypt,@function
.align	16
Camellia_cbc_encrypt:
.L_Camellia_cbc_encrypt_begin:
	pushl	%ebp
	pushl	%ebx
	pushl	%esi
	pushl	%edi
	movl	28(%esp),%ecx
	cmpl	$0,%ecx
	je	.L016enc_out
	pushfl
	cld
	movl	24(%esp),%eax
	movl	28(%esp),%ebx
	movl	36(%esp),%edx
	movl	40(%esp),%ebp
	leal	-64(%esp),%esi
	andl	$-64,%esi
	leal	-127(%edx),%edi
	subl	%esi,%edi
	negl	%edi
	andl	$960,%edi
	subl	%edi,%esi
	movl	44(%esp),%edi
	xchgl	%esi,%esp
	addl	$4,%esp
	movl	%esi,20(%esp)
	movl	%eax,24(%esp)
	movl	%ebx,28(%esp)
	movl	%ecx,32(%esp)
	movl	%edx,36(%esp)
	movl	%ebp,40(%esp)
	call	.L017pic_point
.L017pic_point:
	popl	%ebp
	leal	.LCamellia_SBOX-.L017pic_point(%ebp),%ebp
	movl	$32,%esi
.align	4
.L018prefetch_sbox:
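	# touch all 4096 bytes of .LCamellia_SBOX (32 iterations x 128
	# bytes) so the S-box is cache-resident before the main loop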
	movl	(%ebp),%eax
	movl	32(%ebp),%ebx
	movl	64(%ebp),%ecx
	movl	96(%ebp),%edx
	leal	128(%ebp),%ebp
	decl	%esi
	jnz	.L018prefetch_sbox
	movl	36(%esp),%eax
	subl	$4096,%ebp
	movl	24(%esp),%esi
	movl	272(%eax),%edx
	cmpl	$0,%edi
	je	.L019DECRYPT
	movl	32(%esp),%ecx
	movl	40(%esp),%edi
	shll	$6,%edx
	leal	(%eax,%edx,1),%edx
	movl	%edx,16(%esp)
	testl	$4294967280,%ecx
	jz	.L020enc_tail
	movl	(%edi),%eax
	movl	4(%edi),%ebx
.align	4
.L021enc_loop:
	movl	8(%edi),%ecx
	movl	12(%edi),%edx
	xorl	(%esi),%eax
	xorl	4(%esi),%ebx
	xorl	8(%esi),%ecx
	bswap	%eax
	xorl	12(%esi),%edx
	bswap	%ebx
	movl	36(%esp),%edi
	bswap	%ecx
	bswap	%edx
	call	_x86_Camellia_encrypt
	movl	24(%esp),%esi
	movl	28(%esp),%edi
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	movl	%eax,(%edi)
	bswap	%edx
	movl	%ebx,4(%edi)
	movl	%ecx,8(%edi)
	movl	%edx,12(%edi)
	movl	32(%esp),%ecx
	leal	16(%esi),%esi
	movl	%esi,24(%esp)
	leal	16(%edi),%edx
	movl	%edx,28(%esp)
	subl	$16,%ecx
	testl	$4294967280,%ecx
	movl	%ecx,32(%esp)
	jnz	.L021enc_loop
	testl	$15,%ecx
	jnz	.L020enc_tail
	movl	40(%esp),%esi
	movl	8(%edi),%ecx
	movl	12(%edi),%edx
	movl	%eax,(%esi)
	movl	%ebx,4(%esi)
	movl	%ecx,8(%esi)
	movl	%edx,12(%esi)
	movl	20(%esp),%esp
	popfl
.L016enc_out:
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
	pushfl
.align	4
.L020enc_tail:
	movl	%edi,%eax
	movl	28(%esp),%edi
	pushl	%eax
	movl	$16,%ebx
	subl	%ecx,%ebx
	cmpl	%esi,%edi
	je	.L022enc_in_place
.align	4
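	# the .long below encodes "mov %esi,%esi; rep movsb" (89 F6 F3 A4):
	# copy the %ecx-byte tail from (%esi) to (%edi)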
.long	2767451785
	jmp	.L023enc_skip_in_place
.L022enc_in_place:
	leal	(%edi,%ecx,1),%edi
.L023enc_skip_in_place:
	movl	%ebx,%ecx
	xorl	%eax,%eax
.align	4
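	# the .long below encodes "mov %esi,%esi; rep stosb" (89 F6 F3 AA):
	# zero-pad the remaining %ecx bytes (%eax was cleared above)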
.long	2868115081
	popl	%edi
	movl	28(%esp),%esi
	movl	(%edi),%eax
	movl	4(%edi),%ebx
	movl	$16,32(%esp)
	jmp	.L021enc_loop
.align	16
.L019DECRYPT:
	shll	$6,%edx
	leal	(%eax,%edx,1),%edx
	movl	%eax,16(%esp)
	movl	%edx,36(%esp)
	cmpl	28(%esp),%esi
	je	.L024dec_in_place
	movl	40(%esp),%edi
	movl	%edi,44(%esp)
.align	4
.L025dec_loop:
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	bswap	%eax
	movl	12(%esi),%edx
	bswap	%ebx
	movl	36(%esp),%edi
	bswap	%ecx
	bswap	%edx
	call	_x86_Camellia_decrypt
	movl	44(%esp),%edi
	movl	32(%esp),%esi
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	xorl	(%edi),%eax
	bswap	%edx
	xorl	4(%edi),%ebx
	xorl	8(%edi),%ecx
	xorl	12(%edi),%edx
	subl	$16,%esi
	jc	.L026dec_partial
	movl	%esi,32(%esp)
	movl	24(%esp),%esi
	movl	28(%esp),%edi
	movl	%eax,(%edi)
	movl	%ebx,4(%edi)
	movl	%ecx,8(%edi)
	movl	%edx,12(%edi)
	movl	%esi,44(%esp)
	leal	16(%esi),%esi
	movl	%esi,24(%esp)
	leal	16(%edi),%edi
	movl	%edi,28(%esp)
	jnz	.L025dec_loop
	movl	44(%esp),%edi
.L027dec_end:
	movl	40(%esp),%esi
	movl	(%edi),%eax
	movl	4(%edi),%ebx
	movl	8(%edi),%ecx
	movl	12(%edi),%edx
	movl	%eax,(%esi)
	movl	%ebx,4(%esi)
	movl	%ecx,8(%esi)
	movl	%edx,12(%esi)
	jmp	.L028dec_out
.align	4
.L026dec_partial:
	leal	44(%esp),%edi
	movl	%eax,(%edi)
	movl	%ebx,4(%edi)
	movl	%ecx,8(%edi)
	movl	%edx,12(%edi)
	leal	16(%esi),%ecx
	movl	%edi,%esi
	movl	28(%esp),%edi
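	# "mov %esi,%esi; rep movsb" again: copy the %ecx remaining bytes of
	# the last block from the stack bounce buffer to the output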
.long	2767451785
	movl	24(%esp),%edi
	jmp	.L027dec_end
.align	4
.L024dec_in_place:
.L029dec_in_place_loop:
	leal	44(%esp),%edi
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	movl	12(%esi),%edx
	movl	%eax,(%edi)
	movl	%ebx,4(%edi)
	movl	%ecx,8(%edi)
	bswap	%eax
	movl	%edx,12(%edi)
	bswap	%ebx
	movl	36(%esp),%edi
	bswap	%ecx
	bswap	%edx
	call	_x86_Camellia_decrypt
	movl	40(%esp),%edi
	movl	28(%esp),%esi
	bswap	%eax
	bswap	%ebx
	bswap	%ecx
	xorl	(%edi),%eax
	bswap	%edx
	xorl	4(%edi),%ebx
	xorl	8(%edi),%ecx
	xorl	12(%edi),%edx
	movl	%eax,(%esi)
	movl	%ebx,4(%esi)
	movl	%ecx,8(%esi)
	movl	%edx,12(%esi)
	leal	16(%esi),%esi
	movl	%esi,28(%esp)
	leal	44(%esp),%esi
	movl	(%esi),%eax
	movl	4(%esi),%ebx
	movl	8(%esi),%ecx
	movl	12(%esi),%edx
	movl	%eax,(%edi)
	movl	%ebx,4(%edi)
	movl	%ecx,8(%edi)
	movl	%edx,12(%edi)
	movl	24(%esp),%esi
	leal	16(%esi),%esi
	movl	%esi,24(%esp)
	movl	32(%esp),%ecx
	subl	$16,%ecx
	jc	.L030dec_in_place_partial
	movl	%ecx,32(%esp)
	jnz	.L029dec_in_place_loop
	jmp	.L028dec_out
.align	4
.L030dec_in_place_partial:
	movl	28(%esp),%edi
	leal	44(%esp),%esi
	leal	(%edi,%ecx,1),%edi
	leal	16(%esi,%ecx,1),%esi
	negl	%ecx
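	# "mov %esi,%esi; rep movsb": copy the partial final block from the
	# bounce buffer back over the tail of the output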
.long	2767451785
.align	4
.L028dec_out:
	movl	20(%esp),%esp
	popfl
	popl	%edi
	popl	%esi
	popl	%ebx
	popl	%ebp
	ret
.size	Camellia_cbc_encrypt,.-.L_Camellia_cbc_encrypt_begin
.byte	67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54
.byte	32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115
.byte	115,108,46,111,114,103,62,0
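# (the .byte lines above spell out "Camellia for x86 by <appro@openssl.org>")

Camellia_cbc_encrypt above is the CBC driver: it realigns the stack to a
64-byte boundary, walks the whole S-box once to warm the cache, byte-swaps
each 16-byte block around _x86_Camellia_encrypt/_x86_Camellia_decrypt, and
keeps a 16-byte bounce buffer at 44(%esp) for partial tails and in-place
decryption. A minimal C usage sketch, assuming the stock
<openssl/camellia.h> interface; the key, IV, and data here are
placeholders:

	#include <openssl/camellia.h>

	static void cbc_demo(void)
	{
		unsigned char key[16] = {0};                 /* placeholder */
		unsigned char iv[CAMELLIA_BLOCK_SIZE] = {0}; /* placeholder */
		unsigned char in[32] = {0}, out[32];
		CAMELLIA_KEY ks;

		Camellia_set_key(key, 128, &ks);
		/* note: the IV buffer is updated in place */
		Camellia_cbc_encrypt(in, out, sizeof(in), &ks, iv,
		    CAMELLIA_ENCRYPT);
	}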

File Added: src/crypto/external/bsd/openssl/lib/libcrypto/arch/i386/uplink-x86.S
.file	"uplink-x86.s"
.text
.globl	OPENSSL_UplinkTable
.type	_$lazy1,@function
.align	16
_$lazy1:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$1
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*4(%eax)
.size	_$lazy1,.-_$lazy1
.type	_$lazy2,@function
.align	16
_$lazy2:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$2
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*8(%eax)
.size	_$lazy2,.-_$lazy2
.type	_$lazy3,@function
.align	16
_$lazy3:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$3
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*12(%eax)
.size	_$lazy3,.-_$lazy3
.type	_$lazy4,@function
.align	16
_$lazy4:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$4
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*16(%eax)
.size	_$lazy4,.-_$lazy4
.type	_$lazy5,@function
.align	16
_$lazy5:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$5
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*20(%eax)
.size	_$lazy5,.-_$lazy5
.type	_$lazy6,@function
.align	16
_$lazy6:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$6
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*24(%eax)
.size	_$lazy6,.-_$lazy6
.type	_$lazy7,@function
.align	16
_$lazy7:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$7
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*28(%eax)
.size	_$lazy7,.-_$lazy7
.type	_$lazy8,@function
.align	16
_$lazy8:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$8
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*32(%eax)
.size	_$lazy8,.-_$lazy8
.type	_$lazy9,@function
.align	16
_$lazy9:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$9
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*36(%eax)
.size	_$lazy9,.-_$lazy9
.type	_$lazy10,@function
.align	16
_$lazy10:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$10
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*40(%eax)
.size	_$lazy10,.-_$lazy10
.type	_$lazy11,@function
.align	16
_$lazy11:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$11
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*44(%eax)
.size	_$lazy11,.-_$lazy11
.type	_$lazy12,@function
.align	16
_$lazy12:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$12
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*48(%eax)
.size	_$lazy12,.-_$lazy12
.type	_$lazy13,@function
.align	16
_$lazy13:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$13
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*52(%eax)
.size	_$lazy13,.-_$lazy13
.type	_$lazy14,@function
.align	16
_$lazy14:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$14
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*56(%eax)
.size	_$lazy14,.-_$lazy14
.type	_$lazy15,@function
.align	16
_$lazy15:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$15
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*60(%eax)
.size	_$lazy15,.-_$lazy15
.type	_$lazy16,@function
.align	16
_$lazy16:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$16
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*64(%eax)
.size	_$lazy16,.-_$lazy16
.type	_$lazy17,@function
.align	16
_$lazy17:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$17
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*68(%eax)
.size	_$lazy17,.-_$lazy17
.type	_$lazy18,@function
.align	16
_$lazy18:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$18
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*72(%eax)
.size	_$lazy18,.-_$lazy18
.type	_$lazy19,@function
.align	16
_$lazy19:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$19
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*76(%eax)
.size	_$lazy19,.-_$lazy19
.type	_$lazy20,@function
.align	16
_$lazy20:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$20
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*80(%eax)
.size	_$lazy20,.-_$lazy20
.type	_$lazy21,@function
.align	16
_$lazy21:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$21
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*84(%eax)
.size	_$lazy21,.-_$lazy21
.type	_$lazy22,@function
.align	16
_$lazy22:
	leal	OPENSSL_UplinkTable,%eax
	pushl	%eax
	pushl	$22
	call	OPENSSL_Uplink
	addl	$8,%esp
	popl	%eax
	jmp	*88(%eax)
.size	_$lazy22,.-_$lazy22
.data
.align	4
OPENSSL_UplinkTable:
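# slot 0 holds the entry count; slots 1..22 start out as the lazy thunks
# above and are overwritten with real function pointers on first use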
.long	22
.long	_$lazy1
.long	_$lazy2
.long	_$lazy3
.long	_$lazy4
.long	_$lazy5
.long	_$lazy6
.long	_$lazy7
.long	_$lazy8
.long	_$lazy9
.long	_$lazy10
.long	_$lazy11
.long	_$lazy12
.long	_$lazy13
.long	_$lazy14
.long	_$lazy15
.long	_$lazy16
.long	_$lazy17
.long	_$lazy18
.long	_$lazy19
.long	_$lazy20
.long	_$lazy21
.long	_$lazy22
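
Each _$lazyN thunk hands OPENSSL_Uplink the table address and its index N
and then jumps through slot N, so the first call through a slot resolves
it and every later call goes straight to the installed function.
OPENSSL_Uplink itself comes from the host side (ms/uplink.c in the
OpenSSL sources); a minimal sketch of the contract the thunks rely on,
with resolve_entry() as a hypothetical stand-in for however the host
locates the target:

	extern void *resolve_entry(int index);   /* hypothetical resolver */

	void OPENSSL_Uplink(volatile void **table, int index)
	{
		/* patch the slot; the thunk then jumps through it, and
		 * subsequent callers never re-enter this path */
		table[index] = resolve_entry(index);
	}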

File Added: src/crypto/external/bsd/openssl/lib/libcrypto/arch/sparc64/sha1-sparcv9a.S
.section	".text",#alloc,#execinstr

.align	64
vis_const:
.long	0x5a827999,0x5a827999	! K_00_19
.long	0x6ed9eba1,0x6ed9eba1	! K_20_39
.long	0x8f1bbcdc,0x8f1bbcdc	! K_40_59
.long	0xca62c1d6,0xca62c1d6	! K_60_79
.long	0x00000100,0x00000100
.align	64
.type	vis_const,#object
.size	vis_const,(.-vis_const)

.globl	sha1_block_data_order
sha1_block_data_order:
	save	%sp,-112,%sp
	add	%fp,0-256,%g1

1:	call	.+8
	add	%o7,vis_const-1b,%i3
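	! the call/add pair above forms the address of vis_const relative
	! to the PC, keeping this code position-independent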

	ldd	[%i3+0],%f34
	ldd	[%i3+8],%f36
	ldd	[%i3+16],%f38
	ldd	[%i3+24],%f40
	ldd	[%i3+32],%f32

	ld	[%i0+0],%o0
	and	%g1,-256,%g1
	ld	[%i0+4],%o1
	sub	%g1,0+112,%sp
	ld	[%i0+8],%o2
	and	%i1,7,%g4
	ld	[%i0+12],%o3
	and	%i1,-8,%i1
	ld	[%i0+16],%o4

	! X[16] is maintained in FP register bank
	.word	0x81b00304 !alignaddr	%g0,%g4,%g0
	ldd		[%i1+0],%f0
	sub		%i1,-64,%o5
	ldd		[%i1+8],%f2
	and		%o5,-64,%o5
	ldd		[%i1+16],%f4
	and		%o5,255,%o5
	ldd		[%i1+24],%f6
	add		%g1,%o5,%o5
	ldd		[%i1+32],%f8
	ldd		[%i1+40],%f10
	ldd		[%i1+48],%f12
	brz,pt		%g4,.Laligned
	ldd		[%i1+56],%f14

	ldd		[%i1+64],%f16
	.word	0x81b00902 !faligndata	%f0,%f2,%f0
	.word	0x85b08904 !faligndata	%f2,%f4,%f2
	.word	0x89b10906 !faligndata	%f4,%f6,%f4
	.word	0x8db18908 !faligndata	%f6,%f8,%f6
	.word	0x91b2090a !faligndata	%f8,%f10,%f8
	.word	0x95b2890c !faligndata	%f10,%f12,%f10
	.word	0x99b3090e !faligndata	%f12,%f14,%f12
	.word	0x9db38910 !faligndata	%f14,%f16,%f14

.Laligned:
	mov		5,%i3
	dec		1,%i2
	.word	0x81b0031b !alignaddr	%g0,%i3,%g0
	.word	0xa1b0ca40 !fpadd32	%f34,%f0,%f16
	.word	0xa5b0ca42 !fpadd32	%f34,%f2,%f18
	.word	0xa9b0ca44 !fpadd32	%f34,%f4,%f20
	.word	0xadb0ca46 !fpadd32	%f34,%f6,%f22
	.word	0xb1b0ca48 !fpadd32	%f34,%f8,%f24
	.word	0xb5b0ca4a !fpadd32	%f34,%f10,%f26
	.word	0xb9b0ca4c !fpadd32	%f34,%f12,%f28
	.word	0xbdb0ca4e !fpadd32	%f34,%f14,%f30
	std		%f16,[%o5+0]
	mov		%o0,%l0
	std		%f18,[%o5+8]
	mov		%o1,%l1
	std		%f20,[%o5+16]
	mov		%o2,%l2
	std		%f22,[%o5+24]
	mov		%o3,%l3
	std		%f24,[%o5+32]
	mov		%o4,%l4
	std		%f26,[%o5+40]
	.word	0x81b34da0 !fxors	%f13,%f0,%f0
	std		%f28,[%o5+48]
	ba		.Loop
	std		%f30,[%o5+56]
.align	32
.Loop:
	sll		%l0,5,%i3			!! 0
	and		%l2,%l1,%g5
	ld		[%o5+0],%o7
	 .word	0x83b38da1 !fxors	%f14,%f1,%f1! 0/ 0/ 0:X[1]^=X[14]
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b08d88 !fxor	%f2,%f8,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	sll		%l1,30,%i5
	add		%i4,%l4,%l4
	andn		%l3,%l1,%i4
	add		%o7,%l4,%l4
	 .word	0x81b48d80 !fxor	%f18,%f0,%f0		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l1,2,%l1
	or		%i4,%g5,%i4
	or		%i5,%l1,%l1
	add		%i4,%l4,%l4
	 .word	0xa5b00900 !faligndata	%f0,%f0,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l4,5,%i3			!! 1
	and		%l1,%l0,%g5
	ld		[%o5+4],%o7
	 .word	0x81b00a40 !fpadd32	%f0,%f0,%f0		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	sll		%l0,30,%i5
	add		%i4,%l3,%l3
	 .word	0xa9b0ca4e !fpadd32	%f34,%f14,%f20			!
	andn		%l2,%l0,%i4
	add		%o7,%l3,%l3
	 .word	0x85b3cda2 !fxors	%f15,%f2,%f2	!-1/-1/-1:X[0]^=X[13]
	srl		%l0,2,%l0
	or		%i4,%g5,%i4
	 .word	0x81b48d80 !fxor	%f18,%f0,%f0		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l0,%l0
	add		%i4,%l3,%l3
	sll		%l3,5,%i3			!! 2
	and		%l0,%l4,%g5
	ld		[%o5+8],%o7
	 .word	0x87b00da3 !fxors	%f0,%f3,%f3! 0/ 0/ 0:X[1]^=X[14]
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b10d8a !fxor	%f4,%f10,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	sll		%l4,30,%i5
	add		%i4,%l2,%l2
	andn		%l1,%l4,%i4
	add		%o7,%l2,%l2
	 .word	0x85b48d82 !fxor	%f18,%f2,%f2		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l4,2,%l4
	or		%i4,%g5,%i4
	or		%i5,%l4,%l4
	add		%i4,%l2,%l2
	 .word	0xa5b08902 !faligndata	%f2,%f2,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l2,5,%i3			!! 3
	and		%l4,%l3,%g5
	ld		[%o5+12],%o7
	 .word	0x85b08a42 !fpadd32	%f2,%f2,%f2		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	sll		%l3,30,%i5
	add		%i4,%l1,%l1
	 .word	0xa9b0ca40 !fpadd32	%f34,%f0,%f20			!
	andn		%l0,%l3,%i4
	add		%o7,%l1,%l1
	 .word	0x89b04da4 !fxors	%f1,%f4,%f4	!-1/-1/-1:X[0]^=X[13]
	srl		%l3,2,%l3
	or		%i4,%g5,%i4
	 .word	0x85b48d82 !fxor	%f18,%f2,%f2		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l3,%l3
	add		%i4,%l1,%l1
	 std		%f20,[%o5+0]		!
	sll		%l1,5,%i3			!! 4
	and		%l3,%l2,%g5
	ld		[%o5+16],%o7
	 .word	0x8bb08da5 !fxors	%f2,%f5,%f5! 0/ 0/ 0:X[1]^=X[14]
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b18d8c !fxor	%f6,%f12,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	sll		%l2,30,%i5
	add		%i4,%l0,%l0
	andn		%l4,%l2,%i4
	add		%o7,%l0,%l0
	 .word	0x89b48d84 !fxor	%f18,%f4,%f4		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l2,2,%l2
	or		%i4,%g5,%i4
	or		%i5,%l2,%l2
	add		%i4,%l0,%l0
	 .word	0xa5b10904 !faligndata	%f4,%f4,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l0,5,%i3			!! 5
	and		%l2,%l1,%g5
	ld		[%o5+20],%o7
	 .word	0x89b10a44 !fpadd32	%f4,%f4,%f4		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	sll		%l1,30,%i5
	add		%i4,%l4,%l4
	 .word	0xa9b0ca42 !fpadd32	%f34,%f2,%f20			!
	andn		%l3,%l1,%i4
	add		%o7,%l4,%l4
	 .word	0x8db0cda6 !fxors	%f3,%f6,%f6	!-1/-1/-1:X[0]^=X[13]
	srl		%l1,2,%l1
	or		%i4,%g5,%i4
	 .word	0x89b48d84 !fxor	%f18,%f4,%f4		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l1,%l1
	add		%i4,%l4,%l4
	 std		%f20,[%o5+8]		!
	sll		%l4,5,%i3			!! 6
	and		%l1,%l0,%g5
	ld		[%o5+24],%o7
	 .word	0x8fb10da7 !fxors	%f4,%f7,%f7! 0/ 0/ 0:X[1]^=X[14]
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b20d8e !fxor	%f8,%f14,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	sll		%l0,30,%i5
	add		%i4,%l3,%l3
	andn		%l2,%l0,%i4
	add		%o7,%l3,%l3
	 .word	0x8db48d86 !fxor	%f18,%f6,%f6		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l0,2,%l0
	or		%i4,%g5,%i4
	or		%i5,%l0,%l0
	add		%i4,%l3,%l3
	 .word	0xa5b18906 !faligndata	%f6,%f6,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l3,5,%i3			!! 7
	and		%l0,%l4,%g5
	ld		[%o5+28],%o7
	 .word	0x8db18a46 !fpadd32	%f6,%f6,%f6		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	sll		%l4,30,%i5
	add		%i4,%l2,%l2
	 .word	0xa9b14a44 !fpadd32	%f36,%f4,%f20			!
	andn		%l1,%l4,%i4
	add		%o7,%l2,%l2
	 .word	0x91b14da8 !fxors	%f5,%f8,%f8	!-1/-1/-1:X[0]^=X[13]
	srl		%l4,2,%l4
	or		%i4,%g5,%i4
	 .word	0x8db48d86 !fxor	%f18,%f6,%f6		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l4,%l4
	add		%i4,%l2,%l2
	 std		%f20,[%o5+16]		!
	sll		%l2,5,%i3			!! 8
	and		%l4,%l3,%g5
	ld		[%o5+32],%o7
	 .word	0x93b18da9 !fxors	%f6,%f9,%f9! 0/ 0/ 0:X[1]^=X[14]
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b28d80 !fxor	%f10,%f0,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	sll		%l3,30,%i5
	add		%i4,%l1,%l1
	andn		%l0,%l3,%i4
	add		%o7,%l1,%l1
	 .word	0x91b48d88 !fxor	%f18,%f8,%f8		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l3,2,%l3
	or		%i4,%g5,%i4
	or		%i5,%l3,%l3
	add		%i4,%l1,%l1
	 .word	0xa5b20908 !faligndata	%f8,%f8,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l1,5,%i3			!! 9
	and		%l3,%l2,%g5
	ld		[%o5+36],%o7
	 .word	0x91b20a48 !fpadd32	%f8,%f8,%f8		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	sll		%l2,30,%i5
	add		%i4,%l0,%l0
	 .word	0xa9b14a46 !fpadd32	%f36,%f6,%f20			!
	andn		%l4,%l2,%i4
	add		%o7,%l0,%l0
	 .word	0x95b1cdaa !fxors	%f7,%f10,%f10	!-1/-1/-1:X[0]^=X[13]
	srl		%l2,2,%l2
	or		%i4,%g5,%i4
	 .word	0x91b48d88 !fxor	%f18,%f8,%f8		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l2,%l2
	add		%i4,%l0,%l0
	 std		%f20,[%o5+24]		!
	sll		%l0,5,%i3			!! 10
	and		%l2,%l1,%g5
	ld		[%o5+40],%o7
	 .word	0x97b20dab !fxors	%f8,%f11,%f11! 0/ 0/ 0:X[1]^=X[14]
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b30d82 !fxor	%f12,%f2,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	sll		%l1,30,%i5
	add		%i4,%l4,%l4
	andn		%l3,%l1,%i4
	add		%o7,%l4,%l4
	 .word	0x95b48d8a !fxor	%f18,%f10,%f10		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l1,2,%l1
	or		%i4,%g5,%i4
	or		%i5,%l1,%l1
	add		%i4,%l4,%l4
	 .word	0xa5b2890a !faligndata	%f10,%f10,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l4,5,%i3			!! 11
	and		%l1,%l0,%g5
	ld		[%o5+44],%o7
	 .word	0x95b28a4a !fpadd32	%f10,%f10,%f10		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	sll		%l0,30,%i5
	add		%i4,%l3,%l3
	 .word	0xa9b14a48 !fpadd32	%f36,%f8,%f20			!
	andn		%l2,%l0,%i4
	add		%o7,%l3,%l3
	 .word	0x99b24dac !fxors	%f9,%f12,%f12	!-1/-1/-1:X[0]^=X[13]
	srl		%l0,2,%l0
	or		%i4,%g5,%i4
	 .word	0x95b48d8a !fxor	%f18,%f10,%f10		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l0,%l0
	add		%i4,%l3,%l3
	 std		%f20,[%o5+32]		!
	sll		%l3,5,%i3			!! 12
	and		%l0,%l4,%g5
	ld		[%o5+48],%o7
	 .word	0x9bb28dad !fxors	%f10,%f13,%f13! 0/ 0/ 0:X[1]^=X[14]
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b38d84 !fxor	%f14,%f4,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	sll		%l4,30,%i5
	add		%i4,%l2,%l2
	andn		%l1,%l4,%i4
	add		%o7,%l2,%l2
	 .word	0x99b48d8c !fxor	%f18,%f12,%f12		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l4,2,%l4
	or		%i4,%g5,%i4
	or		%i5,%l4,%l4
	add		%i4,%l2,%l2
	 .word	0xa5b3090c !faligndata	%f12,%f12,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l2,5,%i3			!! 13
	and		%l4,%l3,%g5
	ld		[%o5+52],%o7
	 .word	0x99b30a4c !fpadd32	%f12,%f12,%f12		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	sll		%l3,30,%i5
	add		%i4,%l1,%l1
	 .word	0xa9b14a4a !fpadd32	%f36,%f10,%f20			!
	andn		%l0,%l3,%i4
	add		%o7,%l1,%l1
	 .word	0x9db2cdae !fxors	%f11,%f14,%f14	!-1/-1/-1:X[0]^=X[13]
	srl		%l3,2,%l3
	or		%i4,%g5,%i4
	 .word	0x99b48d8c !fxor	%f18,%f12,%f12		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l3,%l3
	add		%i4,%l1,%l1
	 std		%f20,[%o5+40]		!
	sll		%l1,5,%i3			!! 14
	and		%l3,%l2,%g5
	ld		[%o5+56],%o7
	 .word	0x9fb30daf !fxors	%f12,%f15,%f15! 0/ 0/ 0:X[1]^=X[14]
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b00d86 !fxor	%f0,%f6,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	sll		%l2,30,%i5
	add		%i4,%l0,%l0
	andn		%l4,%l2,%i4
	add		%o7,%l0,%l0
	 .word	0x9db48d8e !fxor	%f18,%f14,%f14		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l2,2,%l2
	or		%i4,%g5,%i4
	or		%i5,%l2,%l2
	add		%i4,%l0,%l0
	 .word	0xa5b3890e !faligndata	%f14,%f14,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l0,5,%i3			!! 15
	and		%l2,%l1,%g5
	ld		[%o5+60],%o7
	 .word	0x9db38a4e !fpadd32	%f14,%f14,%f14		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	sll		%l1,30,%i5
	add		%i4,%l4,%l4
	 .word	0xa9b14a4c !fpadd32	%f36,%f12,%f20			!
	andn		%l3,%l1,%i4
	add		%o7,%l4,%l4
	 .word	0x81b34da0 !fxors	%f13,%f0,%f0	!-1/-1/-1:X[0]^=X[13]
	srl		%l1,2,%l1
	or		%i4,%g5,%i4
	 .word	0x9db48d8e !fxor	%f18,%f14,%f14		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l1,%l1
	add		%i4,%l4,%l4
	 std		%f20,[%o5+48]		!
	sll		%l4,5,%i3			!! 16
	and		%l1,%l0,%g5
	ld		[%o5+0],%o7
	 .word	0x83b38da1 !fxors	%f14,%f1,%f1! 0/ 0/ 0:X[1]^=X[14]
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b08d88 !fxor	%f2,%f8,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	sll		%l0,30,%i5
	add		%i4,%l3,%l3
	andn		%l2,%l0,%i4
	add		%o7,%l3,%l3
	 .word	0x81b48d80 !fxor	%f18,%f0,%f0		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l0,2,%l0
	or		%i4,%g5,%i4
	or		%i5,%l0,%l0
	add		%i4,%l3,%l3
	 .word	0xa5b00900 !faligndata	%f0,%f0,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l3,5,%i3			!! 17
	and		%l0,%l4,%g5
	ld		[%o5+4],%o7
	 .word	0x81b00a40 !fpadd32	%f0,%f0,%f0		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	sll		%l4,30,%i5
	add		%i4,%l2,%l2
	 .word	0xa9b14a4e !fpadd32	%f36,%f14,%f20			!
	andn		%l1,%l4,%i4
	add		%o7,%l2,%l2
	 .word	0x85b3cda2 !fxors	%f15,%f2,%f2	!-1/-1/-1:X[0]^=X[13]
	srl		%l4,2,%l4
	or		%i4,%g5,%i4
	 .word	0x81b48d80 !fxor	%f18,%f0,%f0		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l4,%l4
	add		%i4,%l2,%l2
	 std		%f20,[%o5+56]		!
	sll		%l2,5,%i3			!! 18
	and		%l4,%l3,%g5
	ld		[%o5+8],%o7
	 .word	0x87b00da3 !fxors	%f0,%f3,%f3! 0/ 0/ 0:X[1]^=X[14]
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b10d8a !fxor	%f4,%f10,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	sll		%l3,30,%i5
	add		%i4,%l1,%l1
	andn		%l0,%l3,%i4
	add		%o7,%l1,%l1
	 .word	0x85b48d82 !fxor	%f18,%f2,%f2		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l3,2,%l3
	or		%i4,%g5,%i4
	or		%i5,%l3,%l3
	add		%i4,%l1,%l1
	 .word	0xa5b08902 !faligndata	%f2,%f2,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l1,5,%i3			!! 19
	and		%l3,%l2,%g5
	ld		[%o5+12],%o7
	 .word	0x85b08a42 !fpadd32	%f2,%f2,%f2		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	sll		%l2,30,%i5
	add		%i4,%l0,%l0
	 .word	0xa9b14a40 !fpadd32	%f36,%f0,%f20			!
	andn		%l4,%l2,%i4
	add		%o7,%l0,%l0
	 .word	0x89b04da4 !fxors	%f1,%f4,%f4	!-1/-1/-1:X[0]^=X[13]
	srl		%l2,2,%l2
	or		%i4,%g5,%i4
	 .word	0x85b48d82 !fxor	%f18,%f2,%f2		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l2,%l2
	add		%i4,%l0,%l0
	 std		%f20,[%o5+0]		!
	sll		%l0,5,%i3			!! 20
	ld		[%o5+16],%o7
	 .word	0x8bb08da5 !fxors	%f2,%f5,%f5! 0/ 0/ 0:X[1]^=X[14]
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b18d8c !fxor	%f6,%f12,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l2,%l1,%i3
	add		%i4,%l4,%l4
	sll		%l1,30,%i5
	xor		%l3,%i3,%i4
	 .word	0x89b48d84 !fxor	%f18,%f4,%f4		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l1,2,%l1
	add		%i4,%l4,%l4
	or		%i5,%l1,%l1
	add		%o7,%l4,%l4
	 .word	0xa5b10904 !faligndata	%f4,%f4,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l4,5,%i3			!! 21
	ld		[%o5+20],%o7
	 .word	0x89b10a44 !fpadd32	%f4,%f4,%f4		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l1,%l0,%i3
	add		%i4,%l3,%l3
	 .word	0xa9b14a42 !fpadd32	%f36,%f2,%f20			!
	sll		%l0,30,%i5
	xor		%l2,%i3,%i4
	 .word	0x8db0cda6 !fxors	%f3,%f6,%f6	!-1/-1/-1:X[0]^=X[13]
	srl		%l0,2,%l0
	add		%i4,%l3,%l3
	 .word	0x89b48d84 !fxor	%f18,%f4,%f4		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l0,%l0
	add		%o7,%l3,%l3
	 std		%f20,[%o5+8]		!
	sll		%l3,5,%i3			!! 22
	ld		[%o5+24],%o7
	 .word	0x8fb10da7 !fxors	%f4,%f7,%f7! 0/ 0/ 0:X[1]^=X[14]
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b20d8e !fxor	%f8,%f14,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l0,%l4,%i3
	add		%i4,%l2,%l2
	sll		%l4,30,%i5
	xor		%l1,%i3,%i4
	 .word	0x8db48d86 !fxor	%f18,%f6,%f6		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l4,2,%l4
	add		%i4,%l2,%l2
	or		%i5,%l4,%l4
	add		%o7,%l2,%l2
	 .word	0xa5b18906 !faligndata	%f6,%f6,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l2,5,%i3			!! 23
	ld		[%o5+28],%o7
	 .word	0x8db18a46 !fpadd32	%f6,%f6,%f6		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l4,%l3,%i3
	add		%i4,%l1,%l1
	 .word	0xa9b14a44 !fpadd32	%f36,%f4,%f20			!
	sll		%l3,30,%i5
	xor		%l0,%i3,%i4
	 .word	0x91b14da8 !fxors	%f5,%f8,%f8	!-1/-1/-1:X[0]^=X[13]
	srl		%l3,2,%l3
	add		%i4,%l1,%l1
	 .word	0x8db48d86 !fxor	%f18,%f6,%f6		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l3,%l3
	add		%o7,%l1,%l1
	 std		%f20,[%o5+16]		!
	sll		%l1,5,%i3			!! 24
	ld		[%o5+32],%o7
	 .word	0x93b18da9 !fxors	%f6,%f9,%f9! 0/ 0/ 0:X[1]^=X[14]
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b28d80 !fxor	%f10,%f0,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l3,%l2,%i3
	add		%i4,%l0,%l0
	sll		%l2,30,%i5
	xor		%l4,%i3,%i4
	 .word	0x91b48d88 !fxor	%f18,%f8,%f8		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l2,2,%l2
	add		%i4,%l0,%l0
	or		%i5,%l2,%l2
	add		%o7,%l0,%l0
	 .word	0xa5b20908 !faligndata	%f8,%f8,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l0,5,%i3			!! 25
	ld		[%o5+36],%o7
	 .word	0x91b20a48 !fpadd32	%f8,%f8,%f8		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l2,%l1,%i3
	add		%i4,%l4,%l4
	 .word	0xa9b14a46 !fpadd32	%f36,%f6,%f20			!
	sll		%l1,30,%i5
	xor		%l3,%i3,%i4
	 .word	0x95b1cdaa !fxors	%f7,%f10,%f10	!-1/-1/-1:X[0]^=X[13]
	srl		%l1,2,%l1
	add		%i4,%l4,%l4
	 .word	0x91b48d88 !fxor	%f18,%f8,%f8		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l1,%l1
	add		%o7,%l4,%l4
	 std		%f20,[%o5+24]		!
	sll		%l4,5,%i3			!! 26
	ld		[%o5+40],%o7
	 .word	0x97b20dab !fxors	%f8,%f11,%f11! 0/ 0/ 0:X[1]^=X[14]
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b30d82 !fxor	%f12,%f2,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l1,%l0,%i3
	add		%i4,%l3,%l3
	sll		%l0,30,%i5
	xor		%l2,%i3,%i4
	 .word	0x95b48d8a !fxor	%f18,%f10,%f10		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l0,2,%l0
	add		%i4,%l3,%l3
	or		%i5,%l0,%l0
	add		%o7,%l3,%l3
	 .word	0xa5b2890a !faligndata	%f10,%f10,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l3,5,%i3			!! 27
	ld		[%o5+44],%o7
	 .word	0x95b28a4a !fpadd32	%f10,%f10,%f10		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l0,%l4,%i3
	add		%i4,%l2,%l2
	 .word	0xa9b1ca48 !fpadd32	%f38,%f8,%f20			!
	sll		%l4,30,%i5
	xor		%l1,%i3,%i4
	 .word	0x99b24dac !fxors	%f9,%f12,%f12	!-1/-1/-1:X[0]^=X[13]
	srl		%l4,2,%l4
	add		%i4,%l2,%l2
	 .word	0x95b48d8a !fxor	%f18,%f10,%f10		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l4,%l4
	add		%o7,%l2,%l2
	 std		%f20,[%o5+32]		!
	sll		%l2,5,%i3			!! 28
	ld		[%o5+48],%o7
	 .word	0x9bb28dad !fxors	%f10,%f13,%f13! 0/ 0/ 0:X[1]^=X[14]
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b38d84 !fxor	%f14,%f4,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l4,%l3,%i3
	add		%i4,%l1,%l1
	sll		%l3,30,%i5
	xor		%l0,%i3,%i4
	 .word	0x99b48d8c !fxor	%f18,%f12,%f12		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l3,2,%l3
	add		%i4,%l1,%l1
	or		%i5,%l3,%l3
	add		%o7,%l1,%l1
	 .word	0xa5b3090c !faligndata	%f12,%f12,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l1,5,%i3			!! 29
	ld		[%o5+52],%o7
	 .word	0x99b30a4c !fpadd32	%f12,%f12,%f12		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l3,%l2,%i3
	add		%i4,%l0,%l0
	 .word	0xa9b1ca4a !fpadd32	%f38,%f10,%f20			!
	sll		%l2,30,%i5
	xor		%l4,%i3,%i4
	 .word	0x9db2cdae !fxors	%f11,%f14,%f14	!-1/-1/-1:X[0]^=X[13]
	srl		%l2,2,%l2
	add		%i4,%l0,%l0
	 .word	0x99b48d8c !fxor	%f18,%f12,%f12		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l2,%l2
	add		%o7,%l0,%l0
	 std		%f20,[%o5+40]		!
	sll		%l0,5,%i3			!! 30
	ld		[%o5+56],%o7
	 .word	0x9fb30daf !fxors	%f12,%f15,%f15! 0/ 0/ 0:X[1]^=X[14]
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b00d86 !fxor	%f0,%f6,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l2,%l1,%i3
	add		%i4,%l4,%l4
	sll		%l1,30,%i5
	xor		%l3,%i3,%i4
	 .word	0x9db48d8e !fxor	%f18,%f14,%f14		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l1,2,%l1
	add		%i4,%l4,%l4
	or		%i5,%l1,%l1
	add		%o7,%l4,%l4
	 .word	0xa5b3890e !faligndata	%f14,%f14,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l4,5,%i3			!! 31
	ld		[%o5+60],%o7
	 .word	0x9db38a4e !fpadd32	%f14,%f14,%f14		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l1,%l0,%i3
	add		%i4,%l3,%l3
	 .word	0xa9b1ca4c !fpadd32	%f38,%f12,%f20			!
	sll		%l0,30,%i5
	xor		%l2,%i3,%i4
	 .word	0x81b34da0 !fxors	%f13,%f0,%f0	!-1/-1/-1:X[0]^=X[13]
	srl		%l0,2,%l0
	add		%i4,%l3,%l3
	 .word	0x9db48d8e !fxor	%f18,%f14,%f14		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l0,%l0
	add		%o7,%l3,%l3
	 std		%f20,[%o5+48]		!
	sll		%l3,5,%i3			!! 32
	ld		[%o5+0],%o7
	 .word	0x83b38da1 !fxors	%f14,%f1,%f1! 0/ 0/ 0:X[1]^=X[14]
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b08d88 !fxor	%f2,%f8,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l0,%l4,%i3
	add		%i4,%l2,%l2
	sll		%l4,30,%i5
	xor		%l1,%i3,%i4
	 .word	0x81b48d80 !fxor	%f18,%f0,%f0		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l4,2,%l4
	add		%i4,%l2,%l2
	or		%i5,%l4,%l4
	add		%o7,%l2,%l2
	 .word	0xa5b00900 !faligndata	%f0,%f0,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l2,5,%i3			!! 33
	ld		[%o5+4],%o7
	 .word	0x81b00a40 !fpadd32	%f0,%f0,%f0		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l4,%l3,%i3
	add		%i4,%l1,%l1
	 .word	0xa9b1ca4e !fpadd32	%f38,%f14,%f20			!
	sll		%l3,30,%i5
	xor		%l0,%i3,%i4
	 .word	0x85b3cda2 !fxors	%f15,%f2,%f2	!-1/-1/-1:X[0]^=X[13]
	srl		%l3,2,%l3
	add		%i4,%l1,%l1
	 .word	0x81b48d80 !fxor	%f18,%f0,%f0		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l3,%l3
	add		%o7,%l1,%l1
	 std		%f20,[%o5+56]		!
	sll		%l1,5,%i3			!! 34
	ld		[%o5+8],%o7
	 .word	0x87b00da3 !fxors	%f0,%f3,%f3! 0/ 0/ 0:X[1]^=X[14]
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b10d8a !fxor	%f4,%f10,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l3,%l2,%i3
	add		%i4,%l0,%l0
	sll		%l2,30,%i5
	xor		%l4,%i3,%i4
	 .word	0x85b48d82 !fxor	%f18,%f2,%f2		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l2,2,%l2
	add		%i4,%l0,%l0
	or		%i5,%l2,%l2
	add		%o7,%l0,%l0
	 .word	0xa5b08902 !faligndata	%f2,%f2,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l0,5,%i3			!! 35
	ld		[%o5+12],%o7
	 .word	0x85b08a42 !fpadd32	%f2,%f2,%f2		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l2,%l1,%i3
	add		%i4,%l4,%l4
	 .word	0xa9b1ca40 !fpadd32	%f38,%f0,%f20			!
	sll		%l1,30,%i5
	xor		%l3,%i3,%i4
	 .word	0x89b04da4 !fxors	%f1,%f4,%f4	!-1/-1/-1:X[0]^=X[13]
	srl		%l1,2,%l1
	add		%i4,%l4,%l4
	 .word	0x85b48d82 !fxor	%f18,%f2,%f2		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l1,%l1
	add		%o7,%l4,%l4
	 std		%f20,[%o5+0]		!
	sll		%l4,5,%i3			!! 36
	ld		[%o5+16],%o7
	 .word	0x8bb08da5 !fxors	%f2,%f5,%f5! 0/ 0/ 0:X[1]^=X[14]
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b18d8c !fxor	%f6,%f12,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l1,%l0,%i3
	add		%i4,%l3,%l3
	sll		%l0,30,%i5
	xor		%l2,%i3,%i4
	 .word	0x89b48d84 !fxor	%f18,%f4,%f4		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l0,2,%l0
	add		%i4,%l3,%l3
	or		%i5,%l0,%l0
	add		%o7,%l3,%l3
	 .word	0xa5b10904 !faligndata	%f4,%f4,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l3,5,%i3			!! 37
	ld		[%o5+20],%o7
	 .word	0x89b10a44 !fpadd32	%f4,%f4,%f4		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l0,%l4,%i3
	add		%i4,%l2,%l2
	 .word	0xa9b1ca42 !fpadd32	%f38,%f2,%f20			!
	sll		%l4,30,%i5
	xor		%l1,%i3,%i4
	 .word	0x8db0cda6 !fxors	%f3,%f6,%f6	!-1/-1/-1:X[0]^=X[13]
	srl		%l4,2,%l4
	add		%i4,%l2,%l2
	 .word	0x89b48d84 !fxor	%f18,%f4,%f4		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l4,%l4
	add		%o7,%l2,%l2
	 std		%f20,[%o5+8]		!
	sll		%l2,5,%i3			!! 38
	ld		[%o5+24],%o7
	 .word	0x8fb10da7 !fxors	%f4,%f7,%f7! 0/ 0/ 0:X[1]^=X[14]
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b20d8e !fxor	%f8,%f14,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l4,%l3,%i3
	add		%i4,%l1,%l1
	sll		%l3,30,%i5
	xor		%l0,%i3,%i4
	 .word	0x8db48d86 !fxor	%f18,%f6,%f6		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l3,2,%l3
	add		%i4,%l1,%l1
	or		%i5,%l3,%l3
	add		%o7,%l1,%l1
	 .word	0xa5b18906 !faligndata	%f6,%f6,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l1,5,%i3			!! 39
	ld		[%o5+28],%o7
	 .word	0x8db18a46 !fpadd32	%f6,%f6,%f6		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l3,%l2,%i3
	add		%i4,%l0,%l0
	 .word	0xa9b1ca44 !fpadd32	%f38,%f4,%f20			!
	sll		%l2,30,%i5
	xor		%l4,%i3,%i4
	 .word	0x91b14da8 !fxors	%f5,%f8,%f8	!-1/-1/-1:X[0]^=X[13]
	srl		%l2,2,%l2
	add		%i4,%l0,%l0
	 .word	0x8db48d86 !fxor	%f18,%f6,%f6		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l2,%l2
	add		%o7,%l0,%l0
	 std		%f20,[%o5+16]		!
	sll		%l0,5,%i3			!! 40
	ld		[%o5+32],%o7
	 .word	0x93b18da9 !fxors	%f6,%f9,%f9! 0/ 0/ 0:X[1]^=X[14]
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b28d80 !fxor	%f10,%f0,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	and		%l2,%l1,%i3
	add		%i4,%l4,%l4
	sll		%l1,30,%i5
	or		%l2,%l1,%i4
	 .word	0x91b48d88 !fxor	%f18,%f8,%f8		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l1,2,%l1
	and		%l3,%i4,%i4
	add		%o7,%l4,%l4
	or		%i4,%i3,%i4
	 .word	0xa5b20908 !faligndata	%f8,%f8,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	or		%i5,%l1,%l1
	add		%i4,%l4,%l4
	 .word	0x91b20a48 !fpadd32	%f8,%f8,%f8		! 4/ 8/ 6:X[0,1]<<=1
	sll		%l4,5,%i3			!! 41
	ld		[%o5+36],%o7
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	and		%l1,%l0,%i3
	add		%i4,%l3,%l3
	 .word	0xa9b1ca46 !fpadd32	%f38,%f6,%f20			!
	sll		%l0,30,%i5
	or		%l1,%l0,%i4
	 .word	0x95b1cdaa !fxors	%f7,%f10,%f10	!-1/-1/-1:X[0]^=X[13]
	srl		%l0,2,%l0
	and		%l2,%i4,%i4
	 .word	0x91b48d88 !fxor	%f18,%f8,%f8		! 8/14/10:X[0,1]|=Tmp
	add		%o7,%l3,%l3
	or		%i4,%i3,%i4
	or		%i5,%l0,%l0
	add		%i4,%l3,%l3
	 std		%f20,[%o5+24]		!
	sll		%l3,5,%i3			!! 42
	ld		[%o5+40],%o7
	 .word	0x97b20dab !fxors	%f8,%f11,%f11! 0/ 0/ 0:X[1]^=X[14]
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b30d82 !fxor	%f12,%f2,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	and		%l0,%l4,%i3
	add		%i4,%l2,%l2
	sll		%l4,30,%i5
	or		%l0,%l4,%i4
	 .word	0x95b48d8a !fxor	%f18,%f10,%f10		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l4,2,%l4
	and		%l1,%i4,%i4
	add		%o7,%l2,%l2
	or		%i4,%i3,%i4
	 .word	0xa5b2890a !faligndata	%f10,%f10,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	or		%i5,%l4,%l4
	add		%i4,%l2,%l2
	 .word	0x95b28a4a !fpadd32	%f10,%f10,%f10		! 4/ 8/ 6:X[0,1]<<=1
	sll		%l2,5,%i3			!! 43
	ld		[%o5+44],%o7
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	and		%l4,%l3,%i3
	add		%i4,%l1,%l1
	 .word	0xa9b1ca48 !fpadd32	%f38,%f8,%f20			!
	sll		%l3,30,%i5
	or		%l4,%l3,%i4
	 .word	0x99b24dac !fxors	%f9,%f12,%f12	!-1/-1/-1:X[0]^=X[13]
	srl		%l3,2,%l3
	and		%l0,%i4,%i4
	 .word	0x95b48d8a !fxor	%f18,%f10,%f10		! 8/14/10:X[0,1]|=Tmp
	add		%o7,%l1,%l1
	or		%i4,%i3,%i4
	or		%i5,%l3,%l3
	add		%i4,%l1,%l1
	 std		%f20,[%o5+32]		!
	sll		%l1,5,%i3			!! 44
	ld		[%o5+48],%o7
	 .word	0x9bb28dad !fxors	%f10,%f13,%f13! 0/ 0/ 0:X[1]^=X[14]
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b38d84 !fxor	%f14,%f4,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	and		%l3,%l2,%i3
	add		%i4,%l0,%l0
	sll		%l2,30,%i5
	or		%l3,%l2,%i4
	 .word	0x99b48d8c !fxor	%f18,%f12,%f12		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l2,2,%l2
	and		%l4,%i4,%i4
	add		%o7,%l0,%l0
	or		%i4,%i3,%i4
	 .word	0xa5b3090c !faligndata	%f12,%f12,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	or		%i5,%l2,%l2
	add		%i4,%l0,%l0
	 .word	0x99b30a4c !fpadd32	%f12,%f12,%f12		! 4/ 8/ 6:X[0,1]<<=1
	sll		%l0,5,%i3			!! 45
	ld		[%o5+52],%o7
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	and		%l2,%l1,%i3
	add		%i4,%l4,%l4
	 .word	0xa9b1ca4a !fpadd32	%f38,%f10,%f20			!
	sll		%l1,30,%i5
	or		%l2,%l1,%i4
	 .word	0x9db2cdae !fxors	%f11,%f14,%f14	!-1/-1/-1:X[0]^=X[13]
	srl		%l1,2,%l1
	and		%l3,%i4,%i4
	 .word	0x99b48d8c !fxor	%f18,%f12,%f12		! 8/14/10:X[0,1]|=Tmp
	add		%o7,%l4,%l4
	or		%i4,%i3,%i4
	or		%i5,%l1,%l1
	add		%i4,%l4,%l4
	 std		%f20,[%o5+40]		!
	sll		%l4,5,%i3			!! 46
	ld		[%o5+56],%o7
	 .word	0x9fb30daf !fxors	%f12,%f15,%f15! 0/ 0/ 0:X[1]^=X[14]
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b00d86 !fxor	%f0,%f6,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	and		%l1,%l0,%i3
	add		%i4,%l3,%l3
	sll		%l0,30,%i5
	or		%l1,%l0,%i4
	 .word	0x9db48d8e !fxor	%f18,%f14,%f14		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l0,2,%l0
	and		%l2,%i4,%i4
	add		%o7,%l3,%l3
	or		%i4,%i3,%i4
	 .word	0xa5b3890e !faligndata	%f14,%f14,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	or		%i5,%l0,%l0
	add		%i4,%l3,%l3
	 .word	0x9db38a4e !fpadd32	%f14,%f14,%f14		! 4/ 8/ 6:X[0,1]<<=1
	sll		%l3,5,%i3			!! 47
	ld		[%o5+60],%o7
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	and		%l0,%l4,%i3
	add		%i4,%l2,%l2
	 .word	0xa9b24a4c !fpadd32	%f40,%f12,%f20			!
	sll		%l4,30,%i5
	or		%l0,%l4,%i4
	 .word	0x81b34da0 !fxors	%f13,%f0,%f0	!-1/-1/-1:X[0]^=X[13]
	srl		%l4,2,%l4
	and		%l1,%i4,%i4
	 .word	0x9db48d8e !fxor	%f18,%f14,%f14		! 8/14/10:X[0,1]|=Tmp
	add		%o7,%l2,%l2
	or		%i4,%i3,%i4
	or		%i5,%l4,%l4
	add		%i4,%l2,%l2
	 std		%f20,[%o5+48]		!
	sll		%l2,5,%i3			!! 48
	ld		[%o5+0],%o7
	 .word	0x83b38da1 !fxors	%f14,%f1,%f1! 0/ 0/ 0:X[1]^=X[14]
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b08d88 !fxor	%f2,%f8,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	and		%l4,%l3,%i3
	add		%i4,%l1,%l1
	sll		%l3,30,%i5
	or		%l4,%l3,%i4
	 .word	0x81b48d80 !fxor	%f18,%f0,%f0		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l3,2,%l3
	and		%l0,%i4,%i4
	add		%o7,%l1,%l1
	or		%i4,%i3,%i4
	 .word	0xa5b00900 !faligndata	%f0,%f0,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	or		%i5,%l3,%l3
	add		%i4,%l1,%l1
	 .word	0x81b00a40 !fpadd32	%f0,%f0,%f0		! 4/ 8/ 6:X[0,1]<<=1
	sll		%l1,5,%i3			!! 49
	ld		[%o5+4],%o7
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	and		%l3,%l2,%i3
	add		%i4,%l0,%l0
	 .word	0xa9b24a4e !fpadd32	%f40,%f14,%f20			!
	sll		%l2,30,%i5
	or		%l3,%l2,%i4
	 .word	0x85b3cda2 !fxors	%f15,%f2,%f2	!-1/-1/-1:X[0]^=X[13]
	srl		%l2,2,%l2
	and		%l4,%i4,%i4
	 .word	0x81b48d80 !fxor	%f18,%f0,%f0		! 8/14/10:X[0,1]|=Tmp
	add		%o7,%l0,%l0
	or		%i4,%i3,%i4
	or		%i5,%l2,%l2
	add		%i4,%l0,%l0
	 std		%f20,[%o5+56]		!
	sll		%l0,5,%i3			!! 50
	ld		[%o5+8],%o7
	 .word	0x87b00da3 !fxors	%f0,%f3,%f3! 0/ 0/ 0:X[1]^=X[14]
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b10d8a !fxor	%f4,%f10,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	and		%l2,%l1,%i3
	add		%i4,%l4,%l4
	sll		%l1,30,%i5
	or		%l2,%l1,%i4
	 .word	0x85b48d82 !fxor	%f18,%f2,%f2		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l1,2,%l1
	and		%l3,%i4,%i4
	add		%o7,%l4,%l4
	or		%i4,%i3,%i4
	 .word	0xa5b08902 !faligndata	%f2,%f2,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	or		%i5,%l1,%l1
	add		%i4,%l4,%l4
	 .word	0x85b08a42 !fpadd32	%f2,%f2,%f2		! 4/ 8/ 6:X[0,1]<<=1
	sll		%l4,5,%i3			!! 51
	ld		[%o5+12],%o7
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	and		%l1,%l0,%i3
	add		%i4,%l3,%l3
	 .word	0xa9b24a40 !fpadd32	%f40,%f0,%f20			!
	sll		%l0,30,%i5
	or		%l1,%l0,%i4
	 .word	0x89b04da4 !fxors	%f1,%f4,%f4	!-1/-1/-1:X[0]^=X[13]
	srl		%l0,2,%l0
	and		%l2,%i4,%i4
	 .word	0x85b48d82 !fxor	%f18,%f2,%f2		! 8/14/10:X[0,1]|=Tmp
	add		%o7,%l3,%l3
	or		%i4,%i3,%i4
	or		%i5,%l0,%l0
	add		%i4,%l3,%l3
	 std		%f20,[%o5+0]		!
	sll		%l3,5,%i3			!! 52
	ld		[%o5+16],%o7
	 .word	0x8bb08da5 !fxors	%f2,%f5,%f5! 0/ 0/ 0:X[1]^=X[14]
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b18d8c !fxor	%f6,%f12,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	and		%l0,%l4,%i3
	add		%i4,%l2,%l2
	sll		%l4,30,%i5
	or		%l0,%l4,%i4
	 .word	0x89b48d84 !fxor	%f18,%f4,%f4		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l4,2,%l4
	and		%l1,%i4,%i4
	add		%o7,%l2,%l2
	or		%i4,%i3,%i4
	 .word	0xa5b10904 !faligndata	%f4,%f4,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	or		%i5,%l4,%l4
	add		%i4,%l2,%l2
	 .word	0x89b10a44 !fpadd32	%f4,%f4,%f4		! 4/ 8/ 6:X[0,1]<<=1
	sll		%l2,5,%i3			!! 53
	ld		[%o5+20],%o7
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	and		%l4,%l3,%i3
	add		%i4,%l1,%l1
	 .word	0xa9b24a42 !fpadd32	%f40,%f2,%f20			!
	sll		%l3,30,%i5
	or		%l4,%l3,%i4
	 .word	0x8db0cda6 !fxors	%f3,%f6,%f6	!-1/-1/-1:X[0]^=X[13]
	srl		%l3,2,%l3
	and		%l0,%i4,%i4
	 .word	0x89b48d84 !fxor	%f18,%f4,%f4		! 8/14/10:X[0,1]|=Tmp
	add		%o7,%l1,%l1
	or		%i4,%i3,%i4
	or		%i5,%l3,%l3
	add		%i4,%l1,%l1
	 std		%f20,[%o5+8]		!
	sll		%l1,5,%i3			!! 54
	ld		[%o5+24],%o7
	 .word	0x8fb10da7 !fxors	%f4,%f7,%f7! 0/ 0/ 0:X[1]^=X[14]
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b20d8e !fxor	%f8,%f14,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	and		%l3,%l2,%i3
	add		%i4,%l0,%l0
	sll		%l2,30,%i5
	or		%l3,%l2,%i4
	 .word	0x8db48d86 !fxor	%f18,%f6,%f6		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l2,2,%l2
	and		%l4,%i4,%i4
	add		%o7,%l0,%l0
	or		%i4,%i3,%i4
	 .word	0xa5b18906 !faligndata	%f6,%f6,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	or		%i5,%l2,%l2
	add		%i4,%l0,%l0
	 .word	0x8db18a46 !fpadd32	%f6,%f6,%f6		! 4/ 8/ 6:X[0,1]<<=1
	sll		%l0,5,%i3			!! 55
	ld		[%o5+28],%o7
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	and		%l2,%l1,%i3
	add		%i4,%l4,%l4
	 .word	0xa9b24a44 !fpadd32	%f40,%f4,%f20			!
	sll		%l1,30,%i5
	or		%l2,%l1,%i4
	 .word	0x91b14da8 !fxors	%f5,%f8,%f8	!-1/-1/-1:X[0]^=X[13]
	srl		%l1,2,%l1
	and		%l3,%i4,%i4
	 .word	0x8db48d86 !fxor	%f18,%f6,%f6		! 8/14/10:X[0,1]|=Tmp
	add		%o7,%l4,%l4
	or		%i4,%i3,%i4
	or		%i5,%l1,%l1
	add		%i4,%l4,%l4
	 std		%f20,[%o5+16]		!
	sll		%l4,5,%i3			!! 56
	ld		[%o5+32],%o7
	 .word	0x93b18da9 !fxors	%f6,%f9,%f9! 0/ 0/ 0:X[1]^=X[14]
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b28d80 !fxor	%f10,%f0,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	and		%l1,%l0,%i3
	add		%i4,%l3,%l3
	sll		%l0,30,%i5
	or		%l1,%l0,%i4
	 .word	0x91b48d88 !fxor	%f18,%f8,%f8		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l0,2,%l0
	and		%l2,%i4,%i4
	add		%o7,%l3,%l3
	or		%i4,%i3,%i4
	 .word	0xa5b20908 !faligndata	%f8,%f8,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	or		%i5,%l0,%l0
	add		%i4,%l3,%l3
	 .word	0x91b20a48 !fpadd32	%f8,%f8,%f8		! 4/ 8/ 6:X[0,1]<<=1
	sll		%l3,5,%i3			!! 57
	ld		[%o5+36],%o7
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	and		%l0,%l4,%i3
	add		%i4,%l2,%l2
	 .word	0xa9b24a46 !fpadd32	%f40,%f6,%f20			!
	sll		%l4,30,%i5
	or		%l0,%l4,%i4
	 .word	0x95b1cdaa !fxors	%f7,%f10,%f10	!-1/-1/-1:X[0]^=X[13]
	srl		%l4,2,%l4
	and		%l1,%i4,%i4
	 .word	0x91b48d88 !fxor	%f18,%f8,%f8		! 8/14/10:X[0,1]|=Tmp
	add		%o7,%l2,%l2
	or		%i4,%i3,%i4
	or		%i5,%l4,%l4
	add		%i4,%l2,%l2
	 std		%f20,[%o5+24]		!
	sll		%l2,5,%i3			!! 58
	ld		[%o5+40],%o7
	 .word	0x97b20dab !fxors	%f8,%f11,%f11! 0/ 0/ 0:X[1]^=X[14]
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b30d82 !fxor	%f12,%f2,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	and		%l4,%l3,%i3
	add		%i4,%l1,%l1
	sll		%l3,30,%i5
	or		%l4,%l3,%i4
	 .word	0x95b48d8a !fxor	%f18,%f10,%f10		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l3,2,%l3
	and		%l0,%i4,%i4
	add		%o7,%l1,%l1
	or		%i4,%i3,%i4
	 .word	0xa5b2890a !faligndata	%f10,%f10,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	or		%i5,%l3,%l3
	add		%i4,%l1,%l1
	 .word	0x95b28a4a !fpadd32	%f10,%f10,%f10		! 4/ 8/ 6:X[0,1]<<=1
	sll		%l1,5,%i3			!! 59
	ld		[%o5+44],%o7
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	and		%l3,%l2,%i3
	add		%i4,%l0,%l0
	 .word	0xa9b24a48 !fpadd32	%f40,%f8,%f20			!
	sll		%l2,30,%i5
	or		%l3,%l2,%i4
	 .word	0x99b24dac !fxors	%f9,%f12,%f12	!-1/-1/-1:X[0]^=X[13]
	srl		%l2,2,%l2
	and		%l4,%i4,%i4
	 .word	0x95b48d8a !fxor	%f18,%f10,%f10		! 8/14/10:X[0,1]|=Tmp
	add		%o7,%l0,%l0
	or		%i4,%i3,%i4
	or		%i5,%l2,%l2
	add		%i4,%l0,%l0
	 std		%f20,[%o5+32]		!
	sll		%l0,5,%i3			!! 60
	ld		[%o5+48],%o7
	 .word	0x9bb28dad !fxors	%f10,%f13,%f13! 0/ 0/ 0:X[1]^=X[14]
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 .word	0xa5b38d84 !fxor	%f14,%f4,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l2,%l1,%i3
	add		%i4,%l4,%l4
	sll		%l1,30,%i5
	xor		%l3,%i3,%i4
	 .word	0x99b48d8c !fxor	%f18,%f12,%f12		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l1,2,%l1
	add		%i4,%l4,%l4
	or		%i5,%l1,%l1
	add		%o7,%l4,%l4
	 .word	0xa5b3090c !faligndata	%f12,%f12,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l4,5,%i3			!! 61
	ld		[%o5+52],%o7
	 .word	0x99b30a4c !fpadd32	%f12,%f12,%f12		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l1,%l0,%i3
	add		%i4,%l3,%l3
	 .word	0xa9b24a4a !fpadd32	%f40,%f10,%f20			!
	sll		%l0,30,%i5
	xor		%l2,%i3,%i4
	 .word	0x9db2cdae !fxors	%f11,%f14,%f14	!-1/-1/-1:X[0]^=X[13]
	srl		%l0,2,%l0
	add		%i4,%l3,%l3
	 .word	0x99b48d8c !fxor	%f18,%f12,%f12		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l0,%l0
	add		%o7,%l3,%l3
	 std		%f20,[%o5+40]		!
	sll		%l3,5,%i3			!! 62
	ld		[%o5+56],%o7
	 .word	0x9fb30daf !fxors	%f12,%f15,%f15! 0/ 0/ 0:X[1]^=X[14]
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	 .word	0xa5b00d86 !fxor	%f0,%f6,%f18! 1/ 1/ 1:Tmp=X[2,3]^X[8,9]
	xor		%l0,%l4,%i3
	add		%i4,%l2,%l2
	sll		%l4,30,%i5
	xor		%l1,%i3,%i4
	 .word	0x9db48d8e !fxor	%f18,%f14,%f14		! 2/ 4/ 3:X[0,1]^=X[2,3]^X[8,9]
	srl		%l4,2,%l4
	add		%i4,%l2,%l2
	or		%i5,%l4,%l4
	add		%o7,%l2,%l2
	 .word	0xa5b3890e !faligndata	%f14,%f14,%f18		! 3/ 7/ 5:Tmp=X[0,1]>>>24
	sll		%l2,5,%i3			!! 63
	ld		[%o5+60],%o7
	 .word	0x9db38a4e !fpadd32	%f14,%f14,%f14		! 4/ 8/ 6:X[0,1]<<=1
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	 .word	0xa5b486e1 !fmul8ulx16	%f18,%f32,%f18			! 5/10/ 7:Tmp>>=7, Tmp&=1
	xor		%l4,%l3,%i3
	add		%i4,%l1,%l1
	 .word	0xa9b24a4c !fpadd32	%f40,%f12,%f20			!
	sll		%l3,30,%i5
	xor		%l0,%i3,%i4
	 .word	0x81b34da0 !fxors	%f13,%f0,%f0	!-1/-1/-1:X[0]^=X[13]
	srl		%l3,2,%l3
	add		%i4,%l1,%l1
	 .word	0x9db48d8e !fxor	%f18,%f14,%f14		! 8/14/10:X[0,1]|=Tmp
	or		%i5,%l3,%l3
	add		%o7,%l1,%l1
	 std		%f20,[%o5+48]		!
	sll		%l1,5,%i3			!! 64
	ld		[%o5+0],%o7
	 .word	0xa9b24a4e !fpadd32	%f40,%f14,%f20
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	xor		%l3,%l2,%i3
	add		%i4,%l0,%l0
	sll		%l2,30,%i5
	xor		%l4,%i3,%i4
	 std		%f20,[%o5+56]
	srl		%l2,2,%l2
	add		%i4,%l0,%l0
	or		%i5,%l2,%l2
	add		%o7,%l0,%l0
	sll		%l0,5,%i3			!! 65
	ld		[%o5+4],%o7
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	xor		%l2,%l1,%i3
	add		%i4,%l4,%l4
	sll		%l1,30,%i5
	xor		%l3,%i3,%i4
	srl		%l1,2,%l1
	add		%i4,%l4,%l4
	or		%i5,%l1,%l1
	add		%o7,%l4,%l4
	sll		%l4,5,%i3			!! 66
	ld		[%o5+8],%o7
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	xor		%l1,%l0,%i3
	add		%i4,%l3,%l3
	sll		%l0,30,%i5
	xor		%l2,%i3,%i4
	srl		%l0,2,%l0
	add		%i4,%l3,%l3
	or		%i5,%l0,%l0
	add		%o7,%l3,%l3
	sll		%l3,5,%i3			!! 67
	ld		[%o5+12],%o7
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	xor		%l0,%l4,%i3
	add		%i4,%l2,%l2
	sll		%l4,30,%i5
	xor		%l1,%i3,%i4
	srl		%l4,2,%l4
	add		%i4,%l2,%l2
	or		%i5,%l4,%l4
	add		%o7,%l2,%l2
	sll		%l2,5,%i3			!! 68
	ld		[%o5+16],%o7
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	xor		%l4,%l3,%i3
	add		%i4,%l1,%l1
	sll		%l3,30,%i5
	xor		%l0,%i3,%i4
	srl		%l3,2,%l3
	add		%i4,%l1,%l1
	or		%i5,%l3,%l3
	add		%o7,%l1,%l1
	sll		%l1,5,%i3			!! 69
	ld		[%o5+20],%o7
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	xor		%l3,%l2,%i3
	add		%i4,%l0,%l0
	sll		%l2,30,%i5
	xor		%l4,%i3,%i4
	srl		%l2,2,%l2
	add		%i4,%l0,%l0
	or		%i5,%l2,%l2
	add		%o7,%l0,%l0
	tst		%i2
	bz,pn		%icc,.Ltail
	nop
	sll		%l0,5,%i3			!! 70
	ld		[%o5+24],%o7
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	 ldd		[%i1+64],%f0
	xor		%l2,%l1,%i3
	add		%i4,%l4,%l4
	sll		%l1,30,%i5
	xor		%l3,%i3,%i4
	srl		%l1,2,%l1
	add		%i4,%l4,%l4
	or		%i5,%l1,%l1
	add		%o7,%l4,%l4

	and		%i1,-64,%g5
	inc		64,%i1
	and		%g5,255,%g5
	.word	0x81b00304 !alignaddr	%g0,%g4,%g0
	add		%g1,%g5,%g5
	 ldd		[%i1+8],%f2
	sll		%l4,5,%i3			!! 71
	ld		[%o5+28],%o7
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	xor		%l1,%l0,%i3
	add		%i4,%l3,%l3
	sll		%l0,30,%i5
	xor		%l2,%i3,%i4
	srl		%l0,2,%l0
	add		%i4,%l3,%l3
	or		%i5,%l0,%l0
	add		%o7,%l3,%l3
	 ldd		[%i1+16],%f4
	 .word	0x81b00902 !faligndata	%f0,%f2,%f0
	sll		%l3,5,%i3			!! 72
	ld		[%o5+32],%o7
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	xor		%l0,%l4,%i3
	add		%i4,%l2,%l2
	 .word	0xa9b0ca40 !fpadd32	%f34,%f0,%f20
	sll		%l4,30,%i5
	xor		%l1,%i3,%i4
	srl		%l4,2,%l4
	add		%i4,%l2,%l2
	or		%i5,%l4,%l4
	add		%o7,%l2,%l2
	 ldd		[%i1+24],%f6
	 std		%f20,[%g5+0]
	 .word	0x85b08904 !faligndata	%f2,%f4,%f2
	sll		%l2,5,%i3			!! 73
	ld		[%o5+36],%o7
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	xor		%l4,%l3,%i3
	add		%i4,%l1,%l1
	 .word	0xa9b0ca42 !fpadd32	%f34,%f2,%f20
	sll		%l3,30,%i5
	xor		%l0,%i3,%i4
	srl		%l3,2,%l3
	add		%i4,%l1,%l1
	or		%i5,%l3,%l3
	add		%o7,%l1,%l1
	 ldd		[%i1+32],%f8
	 std		%f20,[%g5+8]
	 .word	0x89b10906 !faligndata	%f4,%f6,%f4
	sll		%l1,5,%i3			!! 74
	ld		[%o5+40],%o7
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	xor		%l3,%l2,%i3
	add		%i4,%l0,%l0
	 .word	0xa9b0ca44 !fpadd32	%f34,%f4,%f20
	sll		%l2,30,%i5
	xor		%l4,%i3,%i4
	srl		%l2,2,%l2
	add		%i4,%l0,%l0
	or		%i5,%l2,%l2
	add		%o7,%l0,%l0
	 ldd		[%i1+40],%f10
	 std		%f20,[%g5+16]
	 .word	0x8db18908 !faligndata	%f6,%f8,%f6
	sll		%l0,5,%i3			!! 75
	ld		[%o5+44],%o7
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	xor		%l2,%l1,%i3
	add		%i4,%l4,%l4
	 .word	0xa9b0ca46 !fpadd32	%f34,%f6,%f20
	sll		%l1,30,%i5
	xor		%l3,%i3,%i4
	srl		%l1,2,%l1
	add		%i4,%l4,%l4
	or		%i5,%l1,%l1
	add		%o7,%l4,%l4
	 ldd		[%i1+48],%f12
	 std		%f20,[%g5+24]
	 .word	0x91b2090a !faligndata	%f8,%f10,%f8
	sll		%l4,5,%i3			!! 76
	ld		[%o5+48],%o7
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	xor		%l1,%l0,%i3
	add		%i4,%l3,%l3
	 .word	0xa9b0ca48 !fpadd32	%f34,%f8,%f20
	sll		%l0,30,%i5
	xor		%l2,%i3,%i4
	srl		%l0,2,%l0
	add		%i4,%l3,%l3
	or		%i5,%l0,%l0
	add		%o7,%l3,%l3
	 ldd		[%i1+56],%f14
	 std		%f20,[%g5+32]
	 .word	0x95b2890c !faligndata	%f10,%f12,%f10
	sll		%l3,5,%i3			!! 77
	ld		[%o5+52],%o7
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	xor		%l0,%l4,%i3
	add		%i4,%l2,%l2
	 .word	0xa9b0ca4a !fpadd32	%f34,%f10,%f20
	sll		%l4,30,%i5
	xor		%l1,%i3,%i4
	srl		%l4,2,%l4
	add		%i4,%l2,%l2
	or		%i5,%l4,%l4
	add		%o7,%l2,%l2
	 add		%g4,63,%i3
	 and		%i3,-8,%i3
	 ldd		[%i1+%i3],%f16
	 std		%f20,[%g5+40]
	 .word	0x99b3090e !faligndata	%f12,%f14,%f12
	sll		%l2,5,%i3			!! 78
	ld		[%o5+56],%o7
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	xor		%l4,%l3,%i3
	add		%i4,%l1,%l1
	 .word	0xa9b0ca4c !fpadd32	%f34,%f12,%f20
	sll		%l3,30,%i5
	xor		%l0,%i3,%i4
	srl		%l3,2,%l3
	add		%i4,%l1,%l1
	or		%i5,%l3,%l3
	add		%o7,%l1,%l1
	 std		%f20,[%g5+48]
	 .word	0x9db38910 !faligndata	%f14,%f16,%f14
	sll		%l1,5,%i3			!! 79
	ld		[%o5+60],%o7
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	xor		%l3,%l2,%i3
	add		%i4,%l0,%l0
	 .word	0xa9b0ca4e !fpadd32	%f34,%f14,%f20
	sll		%l2,30,%i5
	xor		%l4,%i3,%i4
	srl		%l2,2,%l2
	add		%i4,%l0,%l0
	or		%i5,%l2,%l2
	add		%o7,%l0,%l0
	 std		%f20,[%g5+56]
	add		%l0,%o0,%o0
	add		%l1,%o1,%o1
	add		%l2,%o2,%o2
	add		%l3,%o3,%o3
	add		%l4,%o4,%o4
	mov		5,%i3
	.word	0x81b34da0 !fxors	%f13,%f0,%f0
	mov		%o0,%l0
	mov		%o1,%l1
	mov		%o2,%l2
	mov		%o3,%l3
	mov		%o4,%l4
	.word	0x81b0031b !alignaddr	%g0,%i3,%g0	
	dec		1,%i2
	ba		.Loop
	mov		%g5,%o5

.align	32
.Ltail:
	sll		%l0,5,%i3			!! 70
	ld		[%o5+24],%o7
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	xor		%l2,%l1,%i3
	add		%i4,%l4,%l4
	sll		%l1,30,%i5
	xor		%l3,%i3,%i4
	srl		%l1,2,%l1
	add		%i4,%l4,%l4
	or		%i5,%l1,%l1
	add		%o7,%l4,%l4
	sll		%l4,5,%i3			!! 71
	ld		[%o5+28],%o7
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	xor		%l1,%l0,%i3
	add		%i4,%l3,%l3
	sll		%l0,30,%i5
	xor		%l2,%i3,%i4
	srl		%l0,2,%l0
	add		%i4,%l3,%l3
	or		%i5,%l0,%l0
	add		%o7,%l3,%l3
	sll		%l3,5,%i3			!! 72
	ld		[%o5+32],%o7
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	xor		%l0,%l4,%i3
	add		%i4,%l2,%l2
	sll		%l4,30,%i5
	xor		%l1,%i3,%i4
	srl		%l4,2,%l4
	add		%i4,%l2,%l2
	or		%i5,%l4,%l4
	add		%o7,%l2,%l2
	sll		%l2,5,%i3			!! 73
	ld		[%o5+36],%o7
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	xor		%l4,%l3,%i3
	add		%i4,%l1,%l1
	sll		%l3,30,%i5
	xor		%l0,%i3,%i4
	srl		%l3,2,%l3
	add		%i4,%l1,%l1
	or		%i5,%l3,%l3
	add		%o7,%l1,%l1
	sll		%l1,5,%i3			!! 74
	ld		[%o5+40],%o7
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	xor		%l3,%l2,%i3
	add		%i4,%l0,%l0
	sll		%l2,30,%i5
	xor		%l4,%i3,%i4
	srl		%l2,2,%l2
	add		%i4,%l0,%l0
	or		%i5,%l2,%l2
	add		%o7,%l0,%l0
	sll		%l0,5,%i3			!! 75
	ld		[%o5+44],%o7
	srl		%l0,27,%i4
	add		%i3,%l4,%l4
	xor		%l2,%l1,%i3
	add		%i4,%l4,%l4
	sll		%l1,30,%i5
	xor		%l3,%i3,%i4
	srl		%l1,2,%l1
	add		%i4,%l4,%l4
	or		%i5,%l1,%l1
	add		%o7,%l4,%l4
	sll		%l4,5,%i3			!! 76
	ld		[%o5+48],%o7
	srl		%l4,27,%i4
	add		%i3,%l3,%l3
	xor		%l1,%l0,%i3
	add		%i4,%l3,%l3
	sll		%l0,30,%i5
	xor		%l2,%i3,%i4
	srl		%l0,2,%l0
	add		%i4,%l3,%l3
	or		%i5,%l0,%l0
	add		%o7,%l3,%l3
	sll		%l3,5,%i3			!! 77
	ld		[%o5+52],%o7
	srl		%l3,27,%i4
	add		%i3,%l2,%l2
	xor		%l0,%l4,%i3
	add		%i4,%l2,%l2
	sll		%l4,30,%i5
	xor		%l1,%i3,%i4
	srl		%l4,2,%l4
	add		%i4,%l2,%l2
	or		%i5,%l4,%l4
	add		%o7,%l2,%l2
	sll		%l2,5,%i3			!! 78
	ld		[%o5+56],%o7
	srl		%l2,27,%i4
	add		%i3,%l1,%l1
	xor		%l4,%l3,%i3
	add		%i4,%l1,%l1
	sll		%l3,30,%i5
	xor		%l0,%i3,%i4
	srl		%l3,2,%l3
	add		%i4,%l1,%l1
	or		%i5,%l3,%l3
	add		%o7,%l1,%l1
	sll		%l1,5,%i3			!! 79
	ld		[%o5+60],%o7
	srl		%l1,27,%i4
	add		%i3,%l0,%l0
	xor		%l3,%l2,%i3
	add		%i4,%l0,%l0
	sll		%l2,30,%i5
	xor		%l4,%i3,%i4
	srl		%l2,2,%l2
	add		%i4,%l0,%l0
	or		%i5,%l2,%l2
	add		%o7,%l0,%l0
	add	%l0,%o0,%o0
	add	%l1,%o1,%o1
	add	%l2,%o2,%o2
	add	%l3,%o3,%o3
	add	%l4,%o4,%o4

	st	%o0,[%i0+0]
	st	%o1,[%i0+4]
	st	%o2,[%i0+8]
	st	%o3,[%i0+12]
	st	%o4,[%i0+16]

	ret
	restore
.type	sha1_block_data_order,#function
.size	sha1_block_data_order,(.-sha1_block_data_order)
.asciz	"SHA1 block transform for SPARCv9a, CRYPTOGAMS by <appro@openssl.org>"
.align	4
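
The unrolled loop above keeps the state words a..e in %l0..%l4 and pulls
W[i]+K(i), pre-added on the FP side with fpadd32, from the ring buffer at
%o5; the sll/srl pairs build the rotates. A C reference model of one
scalar round (illustrative only, not the assembler's interface):

	#include <stdint.h>

	static uint32_t rotl(uint32_t x, int n)
	{
		return (x << n) | (x >> (32 - n));
	}

	/* wk is W[i] + K(i), as preloaded from the [%o5] buffer */
	static void sha1_round(uint32_t *a, uint32_t *b, uint32_t *c,
	    uint32_t *d, uint32_t *e, uint32_t wk, int i)
	{
		uint32_t f;

		if (i < 20)                  /* Ch: the and/andn/or rounds */
			f = (*b & *c) | (~*b & *d);
		else if (i < 40 || i >= 60)  /* parity: the xor/xor rounds */
			f = *b ^ *c ^ *d;
		else                         /* Maj: the and/or rounds */
			f = (*b & *c) | (*d & (*b | *c));
		*e += rotl(*a, 5) + f + wk;
		*b = rotl(*b, 30);
	}

The assembler never rotates the five variables through memory; the
unrolled rounds permute the register roles instead, which is why each
"!! n" block reads like the previous one with %l0..%l4 cyclically renamed.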

File Added: src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/cmll-x86_64.S
.text	


.globl	Camellia_EncryptBlock
.type	Camellia_EncryptBlock,@function
.align	16
Camellia_EncryptBlock:
	movl	$128,%eax
	subl	%edi,%eax
	movl	$3,%edi
	adcl	$0,%edi
	jmp	.Lenc_rounds
.size	Camellia_EncryptBlock,.-Camellia_EncryptBlock
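
Camellia_EncryptBlock above is a branch-free shim: "subl %edi,%eax"
against 128 sets the carry flag exactly when the key is longer than 128
bits, and "adcl $0,%edi" then turns the base value 3 into 4, selecting
three 6-round groups for 128-bit keys and four for 192/256-bit keys
before falling into Camellia_EncryptBlock_Rounds. The same computation in
C (a sketch; KEY_TABLE_TYPE flattened to a plain word pointer):

	void Camellia_EncryptBlock_Rounds(int grandRounds,
	    const unsigned char plaintext[], const unsigned int *keyTable,
	    unsigned char ciphertext[]);

	void Camellia_EncryptBlock(int keyBitLength,
	    const unsigned char plaintext[], const unsigned int *keyTable,
	    unsigned char ciphertext[])
	{
		/* 3 grand rounds for 128-bit keys, 4 for 192/256-bit keys */
		int grandRounds = 3 + (keyBitLength > 128);

		Camellia_EncryptBlock_Rounds(grandRounds, plaintext,
		    keyTable, ciphertext);
	}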

.globl	Camellia_EncryptBlock_Rounds
.type	Camellia_EncryptBlock_Rounds,@function
.align	16
.Lenc_rounds:
Camellia_EncryptBlock_Rounds:
	pushq	%rbx
	pushq	%rbp
	pushq	%r13
	pushq	%r14
	pushq	%r15
.Lenc_prologue:


	movq	%rcx,%r13
	movq	%rdx,%r14

	shll	$6,%edi
	leaq	.LCamellia_SBOX@GOTPCREL(%rip),%rbp
	leaq	(%r14,%rdi,1),%r15

	movl	0(%rsi),%r8d
	movl	4(%rsi),%r9d
	movl	8(%rsi),%r10d
	bswapl	%r8d
	movl	12(%rsi),%r11d
	bswapl	%r9d
	bswapl	%r10d
	bswapl	%r11d

	call	_x86_64_Camellia_encrypt

	bswapl	%r8d
	bswapl	%r9d
	bswapl	%r10d
	movl	%r8d,0(%r13)
	bswapl	%r11d
	movl	%r9d,4(%r13)
	movl	%r10d,8(%r13)
	movl	%r11d,12(%r13)

	movq	0(%rsp),%r15
	movq	8(%rsp),%r14
	movq	16(%rsp),%r13
	movq	24(%rsp),%rbp
	movq	32(%rsp),%rbx
	leaq	40(%rsp),%rsp
.Lenc_epilogue:
	.byte	0xf3,0xc3
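	# (0xf3,0xc3 encodes "repz ret", the return idiom perlasm emits to
	# sidestep a branch-prediction quirk on AMD cores)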
.size	Camellia_EncryptBlock_Rounds,.-Camellia_EncryptBlock_Rounds

.type	_x86_64_Camellia_encrypt,@function
.align	16
_x86_64_Camellia_encrypt:
	xorl	0(%r14),%r9d
	xorl	4(%r14),%r8d
	xorl	8(%r14),%r11d
	xorl	12(%r14),%r10d
.align	16
.Leloop:
	movl	16(%r14),%ebx
	movl	20(%r14),%eax

	xorl	%r8d,%eax
	xorl	%r9d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	24(%r14),%ebx
	movl	28(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r10d
	xorl	%ecx,%r11d
	xorl	%edx,%r11d
	xorl	%r10d,%eax
	xorl	%r11d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	32(%r14),%ebx
	movl	36(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r8d
	xorl	%ecx,%r9d
	xorl	%edx,%r9d
	xorl	%r8d,%eax
	xorl	%r9d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	40(%r14),%ebx
	movl	44(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r10d
	xorl	%ecx,%r11d
	xorl	%edx,%r11d
	xorl	%r10d,%eax
	xorl	%r11d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	48(%r14),%ebx
	movl	52(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r8d
	xorl	%ecx,%r9d
	xorl	%edx,%r9d
	xorl	%r8d,%eax
	xorl	%r9d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	56(%r14),%ebx
	movl	60(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r10d
	xorl	%ecx,%r11d
	xorl	%edx,%r11d
	xorl	%r10d,%eax
	xorl	%r11d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	64(%r14),%ebx
	movl	68(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r8d
	xorl	%ecx,%r9d
	xorl	%edx,%r9d
	leaq	64(%r14),%r14
	cmpq	%r15,%r14
	movl	8(%r14),%edx
	movl	12(%r14),%ecx
	je	.Ledone
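	/* between grand rounds comes Camellia's FL/FL^-1 layer:
	   AND, rotate-left-1, XOR on one half; OR, XOR on the other */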

	andl	%r8d,%eax
	orl	%r11d,%edx
	roll	$1,%eax
	xorl	%edx,%r10d
	xorl	%eax,%r9d
	andl	%r10d,%ecx
	orl	%r9d,%ebx
	roll	$1,%ecx
	xorl	%ebx,%r8d
	xorl	%ecx,%r11d
	jmp	.Leloop

.align	16
.Ledone:
	xorl	%r10d,%eax
	xorl	%r11d,%ebx
	xorl	%r8d,%ecx
	xorl	%r9d,%edx

	movl	%eax,%r8d
	movl	%ebx,%r9d
	movl	%ecx,%r10d
	movl	%edx,%r11d

.byte	0xf3,0xc3		
.size	_x86_64_Camellia_encrypt,.-_x86_64_Camellia_encrypt


.globl	Camellia_DecryptBlock
.type	Camellia_DecryptBlock,@function
.align	16
Camellia_DecryptBlock:
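	/* same grand-round derivation as Camellia_EncryptBlock above */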
	movl	$128,%eax
	subl	%edi,%eax
	movl	$3,%edi
	adcl	$0,%edi
	jmp	.Ldec_rounds
.size	Camellia_DecryptBlock,.-Camellia_DecryptBlock

.globl	Camellia_DecryptBlock_Rounds
.type	Camellia_DecryptBlock_Rounds,@function
.align	16
.Ldec_rounds:
Camellia_DecryptBlock_Rounds:
	pushq	%rbx
	pushq	%rbp
	pushq	%r13
	pushq	%r14
	pushq	%r15
.Ldec_prologue:


	movq	%rcx,%r13
	movq	%rdx,%r15

	shll	$6,%edi
	leaq	.LCamellia_SBOX@GOTPCREL(%rip),%rbp
	leaq	(%r15,%rdi,1),%r14

	movl	0(%rsi),%r8d
	movl	4(%rsi),%r9d
	movl	8(%rsi),%r10d
	bswapl	%r8d
	movl	12(%rsi),%r11d
	bswapl	%r9d
	bswapl	%r10d
	bswapl	%r11d

	call	_x86_64_Camellia_decrypt

	bswapl	%r8d
	bswapl	%r9d
	bswapl	%r10d
	movl	%r8d,0(%r13)
	bswapl	%r11d
	movl	%r9d,4(%r13)
	movl	%r10d,8(%r13)
	movl	%r11d,12(%r13)

	movq	0(%rsp),%r15
	movq	8(%rsp),%r14
	movq	16(%rsp),%r13
	movq	24(%rsp),%rbp
	movq	32(%rsp),%rbx
	leaq	40(%rsp),%rsp
.Ldec_epilogue:
	.byte	0xf3,0xc3
.size	Camellia_DecryptBlock_Rounds,.-Camellia_DecryptBlock_Rounds

.type	_x86_64_Camellia_decrypt,@function
.align	16
_x86_64_Camellia_decrypt:
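	/* decryption starts at the tail of the schedule: %r14 points at the
	   post-whitening keys and .Ldloop walks the subkeys backwards */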
	xorl	0(%r14),%r9d
	xorl	4(%r14),%r8d
	xorl	8(%r14),%r11d
	xorl	12(%r14),%r10d
.align	16
.Ldloop:
	movl	-8(%r14),%ebx
	movl	-4(%r14),%eax

	xorl	%r8d,%eax
	xorl	%r9d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	-16(%r14),%ebx
	movl	-12(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r10d
	xorl	%ecx,%r11d
	xorl	%edx,%r11d
	xorl	%r10d,%eax
	xorl	%r11d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	-24(%r14),%ebx
	movl	-20(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r8d
	xorl	%ecx,%r9d
	xorl	%edx,%r9d
	xorl	%r8d,%eax
	xorl	%r9d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	-32(%r14),%ebx
	movl	-28(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r10d
	xorl	%ecx,%r11d
	xorl	%edx,%r11d
	xorl	%r10d,%eax
	xorl	%r11d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	-40(%r14),%ebx
	movl	-36(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r8d
	xorl	%ecx,%r9d
	xorl	%edx,%r9d
	xorl	%r8d,%eax
	xorl	%r9d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	-48(%r14),%ebx
	movl	-44(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r10d
	xorl	%ecx,%r11d
	xorl	%edx,%r11d
	xorl	%r10d,%eax
	xorl	%r11d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	-56(%r14),%ebx
	movl	-52(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r8d
	xorl	%ecx,%r9d
	xorl	%edx,%r9d
	leaq	-64(%r14),%r14
	cmpq	%r15,%r14
	movl	0(%r14),%edx
	movl	4(%r14),%ecx
	je	.Lddone

	andl	%r8d,%eax
	orl	%r11d,%edx
	roll	$1,%eax
	xorl	%edx,%r10d
	xorl	%eax,%r9d
	andl	%r10d,%ecx
	orl	%r9d,%ebx
	roll	$1,%ecx
	xorl	%ebx,%r8d
	xorl	%ecx,%r11d

	jmp	.Ldloop

.align	16
.Lddone:
	xorl	%r10d,%ecx
	xorl	%r11d,%edx
	xorl	%r8d,%eax
	xorl	%r9d,%ebx

	movl	%ecx,%r8d
	movl	%edx,%r9d
	movl	%eax,%r10d
	movl	%ebx,%r11d

.byte	0xf3,0xc3		
.size	_x86_64_Camellia_decrypt,.-_x86_64_Camellia_decrypt
.globl	Camellia_Ekeygen
.type	Camellia_Ekeygen,@function
.align	16
Camellia_Ekeygen:
	pushq	%rbx
	pushq	%rbp
	pushq	%r13
	pushq	%r14
	pushq	%r15
.Lkey_prologue:
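	/* %rdi = key length in bits, %rsi = raw key, %rdx = key table to
	   fill; the grand-round count (3 or 4) is returned in %eax */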

	movq	%rdi,%r15
	movq	%rdx,%r13

	movl	0(%rsi),%r8d
	movl	4(%rsi),%r9d
	movl	8(%rsi),%r10d
	movl	12(%rsi),%r11d

	bswapl	%r8d
	bswapl	%r9d
	bswapl	%r10d
	bswapl	%r11d
	movl	%r9d,0(%r13)
	movl	%r8d,4(%r13)
	movl	%r11d,8(%r13)
	movl	%r10d,12(%r13)
	cmpq	$128,%r15
	je	.L1st128

	movl	16(%rsi),%r8d
	movl	20(%rsi),%r9d
	cmpq	$192,%r15
	je	.L1st192
	movl	24(%rsi),%r10d
	movl	28(%rsi),%r11d
	jmp	.L1st256
.L1st192:
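	/* 192-bit keys: the second half of KR is the bitwise complement of
	   the first */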
	movl	%r8d,%r10d
	movl	%r9d,%r11d
	notl	%r10d
	notl	%r11d
.L1st256:
	bswapl	%r8d
	bswapl	%r9d
	bswapl	%r10d
	bswapl	%r11d
	movl	%r9d,32(%r13)
	movl	%r8d,36(%r13)
	movl	%r11d,40(%r13)
	movl	%r10d,44(%r13)
	xorl	0(%r13),%r9d
	xorl	4(%r13),%r8d
	xorl	8(%r13),%r11d
	xorl	12(%r13),%r10d

.L1st128:
	leaq	.LCamellia_SIGMA@GOTPCREL(%rip),%r14
	leaq	.LCamellia_SBOX@GOTPCREL(%rip),%rbp

	movl	0(%r14),%ebx
	movl	4(%r14),%eax
	xorl	%r8d,%eax
	xorl	%r9d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	8(%r14),%ebx
	movl	12(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r10d
	xorl	%ecx,%r11d
	xorl	%edx,%r11d
	xorl	%r10d,%eax
	xorl	%r11d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	16(%r14),%ebx
	movl	20(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r8d
	xorl	%ecx,%r9d
	xorl	%edx,%r9d
	xorl	0(%r13),%r9d
	xorl	4(%r13),%r8d
	xorl	8(%r13),%r11d
	xorl	12(%r13),%r10d
	xorl	%r8d,%eax
	xorl	%r9d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	24(%r14),%ebx
	movl	28(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r10d
	xorl	%ecx,%r11d
	xorl	%edx,%r11d
	xorl	%r10d,%eax
	xorl	%r11d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	32(%r14),%ebx
	movl	36(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r8d
	xorl	%ecx,%r9d
	xorl	%edx,%r9d
	cmpq	$128,%r15
	jne	.L2nd256
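	/* 128-bit path: the remaining subkeys are rotations of KL and KA;
	   each shlq/shrq/orq cluster below rotates a 128-bit value left by
	   15, 17, 30 or 34 bits using paired 64-bit shifts */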

	leaq	128(%r13),%r13
	shlq	$32,%r8
	shlq	$32,%r10
	orq	%r9,%r8
	orq	%r11,%r10
	movq	-128(%r13),%rax
	movq	-120(%r13),%rbx
	movq	%r8,-112(%r13)
	movq	%r10,-104(%r13)
	movq	%rax,%r11
	shlq	$15,%rax
	movq	%rbx,%r9
	shrq	$49,%r9
	shrq	$49,%r11
	orq	%r9,%rax
	shlq	$15,%rbx
	orq	%r11,%rbx
	movq	%rax,-96(%r13)
	movq	%rbx,-88(%r13)
	movq	%r8,%r11
	shlq	$15,%r8
	movq	%r10,%r9
	shrq	$49,%r9
	shrq	$49,%r11
	orq	%r9,%r8
	shlq	$15,%r10
	orq	%r11,%r10
	movq	%r8,-80(%r13)
	movq	%r10,-72(%r13)
	movq	%r8,%r11
	shlq	$15,%r8
	movq	%r10,%r9
	shrq	$49,%r9
	shrq	$49,%r11
	orq	%r9,%r8
	shlq	$15,%r10
	orq	%r11,%r10
	movq	%r8,-64(%r13)
	movq	%r10,-56(%r13)
	movq	%rax,%r11
	shlq	$30,%rax
	movq	%rbx,%r9
	shrq	$34,%r9
	shrq	$34,%r11
	orq	%r9,%rax
	shlq	$30,%rbx
	orq	%r11,%rbx
	movq	%rax,-48(%r13)
	movq	%rbx,-40(%r13)
	movq	%r8,%r11
	shlq	$15,%r8
	movq	%r10,%r9
	shrq	$49,%r9
	shrq	$49,%r11
	orq	%r9,%r8
	shlq	$15,%r10
	orq	%r11,%r10
	movq	%r8,-32(%r13)
	movq	%rax,%r11
	shlq	$15,%rax
	movq	%rbx,%r9
	shrq	$49,%r9
	shrq	$49,%r11
	orq	%r9,%rax
	shlq	$15,%rbx
	orq	%r11,%rbx
	movq	%rbx,-24(%r13)
	movq	%r8,%r11
	shlq	$15,%r8
	movq	%r10,%r9
	shrq	$49,%r9
	shrq	$49,%r11
	orq	%r9,%r8
	shlq	$15,%r10
	orq	%r11,%r10
	movq	%r8,-16(%r13)
	movq	%r10,-8(%r13)
	movq	%rax,%r11
	shlq	$17,%rax
	movq	%rbx,%r9
	shrq	$47,%r9
	shrq	$47,%r11
	orq	%r9,%rax
	shlq	$17,%rbx
	orq	%r11,%rbx
	movq	%rax,0(%r13)
	movq	%rbx,8(%r13)
	movq	%rax,%r11
	shlq	$17,%rax
	movq	%rbx,%r9
	shrq	$47,%r9
	shrq	$47,%r11
	orq	%r9,%rax
	shlq	$17,%rbx
	orq	%r11,%rbx
	movq	%rax,16(%r13)
	movq	%rbx,24(%r13)
	movq	%r8,%r11
	shlq	$34,%r8
	movq	%r10,%r9
	shrq	$30,%r9
	shrq	$30,%r11
	orq	%r9,%r8
	shlq	$34,%r10
	orq	%r11,%r10
	movq	%r8,32(%r13)
	movq	%r10,40(%r13)
	movq	%rax,%r11
	shlq	$17,%rax
	movq	%rbx,%r9
	shrq	$47,%r9
	shrq	$47,%r11
	orq	%r9,%rax
	shlq	$17,%rbx
	orq	%r11,%rbx
	movq	%rax,48(%r13)
	movq	%rbx,56(%r13)
	movq	%r8,%r11
	shlq	$17,%r8
	movq	%r10,%r9
	shrq	$47,%r9
	shrq	$47,%r11
	orq	%r9,%r8
	shlq	$17,%r10
	orq	%r11,%r10
	movq	%r8,64(%r13)
	movq	%r10,72(%r13)
	movl	$3,%eax
	jmp	.Ldone
.align	16
.L2nd256:
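	/* 192/256-bit path: two more F-rounds derive KB, then the larger
	   schedule is filled from rotations of KL, KR, KA and KB */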
	movl	%r9d,48(%r13)
	movl	%r8d,52(%r13)
	movl	%r11d,56(%r13)
	movl	%r10d,60(%r13)
	xorl	32(%r13),%r9d
	xorl	36(%r13),%r8d
	xorl	40(%r13),%r11d
	xorl	44(%r13),%r10d
	xorl	%r8d,%eax
	xorl	%r9d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	40(%r14),%ebx
	movl	44(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r10d
	xorl	%ecx,%r11d
	xorl	%edx,%r11d
	xorl	%r10d,%eax
	xorl	%r11d,%ebx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	movl	2052(%rbp,%rsi,8),%edx
	movl	0(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	shrl	$16,%eax
	movzbl	%bh,%edi
	xorl	4(%rbp,%rsi,8),%edx
	shrl	$16,%ebx
	xorl	4(%rbp,%rdi,8),%ecx
	movzbl	%ah,%esi
	movzbl	%bl,%edi
	xorl	0(%rbp,%rsi,8),%edx
	xorl	2052(%rbp,%rdi,8),%ecx
	movzbl	%al,%esi
	movzbl	%bh,%edi
	xorl	2048(%rbp,%rsi,8),%edx
	xorl	2048(%rbp,%rdi,8),%ecx
	movl	48(%r14),%ebx
	movl	52(%r14),%eax
	xorl	%edx,%ecx
	rorl	$8,%edx
	xorl	%ecx,%r8d
	xorl	%ecx,%r9d
	xorl	%edx,%r9d
	movq	0(%r13),%rax
	movq	8(%r13),%rbx
	movq	32(%r13),%rcx
	movq	40(%r13),%rdx
	movq	48(%r13),%r14
	movq	56(%r13),%r15
	leaq	128(%r13),%r13
	shlq	$32,%r8
	shlq	$32,%r10
	orq	%r9,%r8
	orq	%r11,%r10
	movq	%r8,-112(%r13)
	movq	%r10,-104(%r13)
	movq	%rcx,%r11
	shlq	$15,%rcx
	movq	%rdx,%r9
	shrq	$49,%r9
	shrq	$49,%r11
	orq	%r9,%rcx
	shlq	$15,%rdx
	orq	%r11,%rdx
	movq	%rcx,-96(%r13)
	movq	%rdx,-88(%r13)
	movq	%r14,%r11
	shlq	$15,%r14
	movq	%r15,%r9
	shrq	$49,%r9
	shrq	$49,%r11
	orq	%r9,%r14
	shlq	$15,%r15
	orq	%r11,%r15
	movq	%r14,-80(%r13)
	movq	%r15,-72(%r13)
	movq	%rcx,%r11
	shlq	$15,%rcx
	movq	%rdx,%r9
	shrq	$49,%r9
	shrq	$49,%r11
	orq	%r9,%rcx
	shlq	$15,%rdx
	orq	%r11,%rdx
	movq	%rcx,-64(%r13)
	movq	%rdx,-56(%r13)
	movq	%r8,%r11
	shlq	$30,%r8
	movq	%r10,%r9
	shrq	$34,%r9
	shrq	$34,%r11
	orq	%r9,%r8
	shlq	$30,%r10
	orq	%r11,%r10
	movq	%r8,-48(%r13)
	movq	%r10,-40(%r13)
	movq	%rax,%r11
	shlq	$45,%rax
	movq	%rbx,%r9
	shrq	$19,%r9
	shrq	$19,%r11
	orq	%r9,%rax
	shlq	$45,%rbx
	orq	%r11,%rbx
	movq	%rax,-32(%r13)
	movq	%rbx,-24(%r13)
	movq	%r14,%r11
	shlq	$30,%r14
	movq	%r15,%r9
	shrq	$34,%r9
	shrq	$34,%r11
	orq	%r9,%r14
	shlq	$30,%r15
	orq	%r11,%r15
	movq	%r14,-16(%r13)
	movq	%r15,-8(%r13)
	movq	%rax,%r11
	shlq	$15,%rax
	movq	%rbx,%r9
	shrq	$49,%r9
	shrq	$49,%r11
	orq	%r9,%rax
	shlq	$15,%rbx
	orq	%r11,%rbx
	movq	%rax,0(%r13)
	movq	%rbx,8(%r13)
	movq	%rcx,%r11
	shlq	$30,%rcx
	movq	%rdx,%r9
	shrq	$34,%r9
	shrq	$34,%r11
	orq	%r9,%rcx
	shlq	$30,%rdx
	orq	%r11,%rdx
	movq	%rcx,16(%r13)
	movq	%rdx,24(%r13)
	movq	%r8,%r11
	shlq	$30,%r8
	movq	%r10,%r9
	shrq	$34,%r9
	shrq	$34,%r11
	orq	%r9,%r8
	shlq	$30,%r10
	orq	%r11,%r10
	movq	%r8,32(%r13)
	movq	%r10,40(%r13)
	movq	%rax,%r11
	shlq	$17,%rax
	movq	%rbx,%r9
	shrq	$47,%r9
	shrq	$47,%r11
	orq	%r9,%rax
	shlq	$17,%rbx
	orq	%r11,%rbx
	movq	%rax,48(%r13)
	movq	%rbx,56(%r13)
	movq	%r14,%r11
	shlq	$32,%r14
	movq	%r15,%r9
	shrq	$32,%r9
	shrq	$32,%r11
	orq	%r9,%r14
	shlq	$32,%r15
	orq	%r11,%r15
	movq	%r14,64(%r13)
	movq	%r15,72(%r13)
	movq	%rcx,%r11
	shlq	$34,%rcx
	movq	%rdx,%r9
	shrq	$30,%r9
	shrq	$30,%r11
	orq	%r9,%rcx
	shlq	$34,%rdx
	orq	%r11,%rdx
	movq	%rcx,80(%r13)
	movq	%rdx,88(%r13)
	movq	%r14,%r11
	shlq	$17,%r14
	movq	%r15,%r9
	shrq	$47,%r9
	shrq	$47,%r11
	orq	%r9,%r14
	shlq	$17,%r15
	orq	%r11,%r15
	movq	%r14,96(%r13)
	movq	%r15,104(%r13)
	movq	%rax,%r11
	shlq	$34,%rax
	movq	%rbx,%r9
	shrq	$30,%r9
	shrq	$30,%r11
	orq	%r9,%rax
	shlq	$34,%rbx
	orq	%r11,%rbx
	movq	%rax,112(%r13)
	movq	%rbx,120(%r13)
	movq	%r8,%r11
	shlq	$51,%r8
	movq	%r10,%r9
	shrq	$13,%r9
	shrq	$13,%r11
	orq	%r9,%r8
	shlq	$51,%r10
	orq	%r11,%r10
	movq	%r8,128(%r13)
	movq	%r10,136(%r13)
	movl	$4,%eax
.Ldone:
	movq	0(%rsp),%r15
	movq	8(%rsp),%r14
	movq	16(%rsp),%r13
	movq	24(%rsp),%rbp
	movq	32(%rsp),%rbx
	leaq	40(%rsp),%rsp
.Lkey_epilogue:
	.byte	0xf3,0xc3
.size	Camellia_Ekeygen,.-Camellia_Ekeygen
.align	64
.LCamellia_SIGMA:
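/* the six 64-bit Camellia Sigma constants, each stored low word first */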
.long	0x3bcc908b, 0xa09e667f, 0x4caa73b2, 0xb67ae858
.long	0xe94f82be, 0xc6ef372f, 0xf1d36f1c, 0x54ff53a5
.long	0xde682d1d, 0x10e527fa, 0xb3e6c1fd, 0xb05688c2
.long	0,          0,          0,          0
.LCamellia_SBOX:
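/* two 2KB halves of byte-replicated S-box output; the round function
   indexes them at offsets 0/4 and 2048/2052, picking pre-rotated
   copies so no shifts are needed after the lookup */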
.long	0x70707000,0x70700070
.long	0x82828200,0x2c2c002c
.long	0x2c2c2c00,0xb3b300b3
.long	0xececec00,0xc0c000c0
.long	0xb3b3b300,0xe4e400e4
.long	0x27272700,0x57570057
.long	0xc0c0c000,0xeaea00ea
.long	0xe5e5e500,0xaeae00ae
.long	0xe4e4e400,0x23230023
.long	0x85858500,0x6b6b006b
.long	0x57575700,0x45450045
.long	0x35353500,0xa5a500a5
.long	0xeaeaea00,0xeded00ed
.long	0x0c0c0c00,0x4f4f004f
.long	0xaeaeae00,0x1d1d001d
.long	0x41414100,0x92920092
.long	0x23232300,0x86860086
.long	0xefefef00,0xafaf00af
.long	0x6b6b6b00,0x7c7c007c
.long	0x93939300,0x1f1f001f
.long	0x45454500,0x3e3e003e
.long	0x19191900,0xdcdc00dc
.long	0xa5a5a500,0x5e5e005e
.long	0x21212100,0x0b0b000b
.long	0xededed00,0xa6a600a6
.long	0x0e0e0e00,0x39390039
.long	0x4f4f4f00,0xd5d500d5
.long	0x4e4e4e00,0x5d5d005d
.long	0x1d1d1d00,0xd9d900d9
.long	0x65656500,0x5a5a005a
.long	0x92929200,0x51510051
.long	0xbdbdbd00,0x6c6c006c
.long	0x86868600,0x8b8b008b
.long	0xb8b8b800,0x9a9a009a
.long	0xafafaf00,0xfbfb00fb
.long	0x8f8f8f00,0xb0b000b0
.long	0x7c7c7c00,0x74740074
.long	0xebebeb00,0x2b2b002b
.long	0x1f1f1f00,0xf0f000f0
.long	0xcecece00,0x84840084
.long	0x3e3e3e00,0xdfdf00df
.long	0x30303000,0xcbcb00cb
.long	0xdcdcdc00,0x34340034
.long	0x5f5f5f00,0x76760076
.long	0x5e5e5e00,0x6d6d006d
.long	0xc5c5c500,0xa9a900a9
.long	0x0b0b0b00,0xd1d100d1
.long	0x1a1a1a00,0x04040004
.long	0xa6a6a600,0x14140014
.long	0xe1e1e100,0x3a3a003a
.long	0x39393900,0xdede00de
.long	0xcacaca00,0x11110011
.long	0xd5d5d500,0x32320032
.long	0x47474700,0x9c9c009c
.long	0x5d5d5d00,0x53530053
.long	0x3d3d3d00,0xf2f200f2
.long	0xd9d9d900,0xfefe00fe
.long	0x01010100,0xcfcf00cf
.long	0x5a5a5a00,0xc3c300c3
.long	0xd6d6d600,0x7a7a007a
.long	0x51515100,0x24240024
.long	0x56565600,0xe8e800e8
.long	0x6c6c6c00,0x60600060
.long	0x4d4d4d00,0x69690069
.long	0x8b8b8b00,0xaaaa00aa
.long	0x0d0d0d00,0xa0a000a0
.long	0x9a9a9a00,0xa1a100a1
.long	0x66666600,0x62620062
.long	0xfbfbfb00,0x54540054
.long	0xcccccc00,0x1e1e001e
.long	0xb0b0b000,0xe0e000e0
.long	0x2d2d2d00,0x64640064
.long	0x74747400,0x10100010
.long	0x12121200,0x00000000
.long	0x2b2b2b00,0xa3a300a3
.long	0x20202000,0x75750075
.long	0xf0f0f000,0x8a8a008a
.long	0xb1b1b100,0xe6e600e6
.long	0x84848400,0x09090009
.long	0x99999900,0xdddd00dd
.long	0xdfdfdf00,0x87870087
.long	0x4c4c4c00,0x83830083
.long	0xcbcbcb00,0xcdcd00cd
.long	0xc2c2c200,0x90900090
.long	0x34343400,0x73730073
.long	0x7e7e7e00,0xf6f600f6
.long	0x76767600,0x9d9d009d
.long	0x05050500,0xbfbf00bf
.long	0x6d6d6d00,0x52520052
.long	0xb7b7b700,0xd8d800d8
.long	0xa9a9a900,0xc8c800c8
.long	0x31313100,0xc6c600c6
.long	0xd1d1d100,0x81810081
.long	0x17171700,0x6f6f006f
.long	0x04040400,0x13130013
.long	0xd7d7d700,0x63630063
.long	0x14141400,0xe9e900e9
.long	0x58585800,0xa7a700a7
.long	0x3a3a3a00,0x9f9f009f
.long	0x61616100,0xbcbc00bc
.long	0xdedede00,0x29290029
.long	0x1b1b1b00,0xf9f900f9
.long	0x11111100,0x2f2f002f
.long	0x1c1c1c00,0xb4b400b4
.long	0x32323200,0x78780078
.long	0x0f0f0f00,0x06060006
.long	0x9c9c9c00,0xe7e700e7
.long	0x16161600,0x71710071
.long	0x53535300,0xd4d400d4
.long	0x18181800,0xabab00ab
.long	0xf2f2f200,0x88880088
.long	0x22222200,0x8d8d008d
.long	0xfefefe00,0x72720072
.long	0x44444400,0xb9b900b9
.long	0xcfcfcf00,0xf8f800f8
.long	0xb2b2b200,0xacac00ac
.long	0xc3c3c300,0x36360036
.long	0xb5b5b500,0x2a2a002a
.long	0x7a7a7a00,0x3c3c003c
.long	0x91919100,0xf1f100f1
.long	0x24242400,0x40400040
.long	0x08080800,0xd3d300d3
.long	0xe8e8e800,0xbbbb00bb
.long	0xa8a8a800,0x43430043
.long	0x60606000,0x15150015
.long	0xfcfcfc00,0xadad00ad
.long	0x69696900,0x77770077
.long	0x50505000,0x80800080
.long	0xaaaaaa00,0x82820082
.long	0xd0d0d000,0xecec00ec
.long	0xa0a0a000,0x27270027
.long	0x7d7d7d00,0xe5e500e5
.long	0xa1a1a100,0x85850085
.long	0x89898900,0x35350035
.long	0x62626200,0x0c0c000c
.long	0x97979700,0x41410041
.long	0x54545400,0xefef00ef
.long	0x5b5b5b00,0x93930093
.long	0x1e1e1e00,0x19190019
.long	0x95959500,0x21210021
.long	0xe0e0e000,0x0e0e000e
.long	0xffffff00,0x4e4e004e
.long	0x64646400,0x65650065
.long	0xd2d2d200,0xbdbd00bd
.long	0x10101000,0xb8b800b8
.long	0xc4c4c400,0x8f8f008f
.long	0x00000000,0xebeb00eb
.long	0x48484800,0xcece00ce
.long	0xa3a3a300,0x30300030
.long	0xf7f7f700,0x5f5f005f
.long	0x75757500,0xc5c500c5
.long	0xdbdbdb00,0x1a1a001a
.long	0x8a8a8a00,0xe1e100e1
.long	0x03030300,0xcaca00ca
.long	0xe6e6e600,0x47470047
.long	0xdadada00,0x3d3d003d
.long	0x09090900,0x01010001
.long	0x3f3f3f00,0xd6d600d6
.long	0xdddddd00,0x56560056
.long	0x94949400,0x4d4d004d
.long	0x87878700,0x0d0d000d
.long	0x5c5c5c00,0x66660066
.long	0x83838300,0xcccc00cc
.long	0x02020200,0x2d2d002d
.long	0xcdcdcd00,0x12120012
.long	0x4a4a4a00,0x20200020
.long	0x90909000,0xb1b100b1
.long	0x33333300,0x99990099
.long	0x73737300,0x4c4c004c
.long	0x67676700,0xc2c200c2
.long	0xf6f6f600,0x7e7e007e
.long	0xf3f3f300,0x05050005
.long	0x9d9d9d00,0xb7b700b7
.long	0x7f7f7f00,0x31310031
.long	0xbfbfbf00,0x17170017
.long	0xe2e2e200,0xd7d700d7
.long	0x52525200,0x58580058
.long	0x9b9b9b00,0x61610061
.long	0xd8d8d800,0x1b1b001b
.long	0x26262600,0x1c1c001c
.long	0xc8c8c800,0x0f0f000f
.long	0x37373700,0x16160016
.long	0xc6c6c600,0x18180018
.long	0x3b3b3b00,0x22220022
.long	0x81818100,0x44440044
.long	0x96969600,0xb2b200b2
.long	0x6f6f6f00,0xb5b500b5
.long	0x4b4b4b00,0x91910091
.long	0x13131300,0x08080008
.long	0xbebebe00,0xa8a800a8
.long	0x63636300,0xfcfc00fc
.long	0x2e2e2e00,0x50500050
.long	0xe9e9e900,0xd0d000d0
.long	0x79797900,0x7d7d007d
.long	0xa7a7a700,0x89890089
.long	0x8c8c8c00,0x97970097
.long	0x9f9f9f00,0x5b5b005b
.long	0x6e6e6e00,0x95950095
.long	0xbcbcbc00,0xffff00ff
.long	0x8e8e8e00,0xd2d200d2
.long	0x29292900,0xc4c400c4
.long	0xf5f5f500,0x48480048
.long	0xf9f9f900,0xf7f700f7
.long	0xb6b6b600,0xdbdb00db
.long	0x2f2f2f00,0x03030003
.long	0xfdfdfd00,0xdada00da
.long	0xb4b4b400,0x3f3f003f
.long	0x59595900,0x94940094
.long	0x78787800,0x5c5c005c
.long	0x98989800,0x02020002
.long	0x06060600,0x4a4a004a
.long	0x6a6a6a00,0x33330033
.long	0xe7e7e700,0x67670067
.long	0x46464600,0xf3f300f3
.long	0x71717100,0x7f7f007f
.long	0xbababa00,0xe2e200e2
.long	0xd4d4d400,0x9b9b009b
.long	0x25252500,0x26260026
.long	0xababab00,0x37370037
.long	0x42424200,0x3b3b003b
.long	0x88888800,0x96960096
.long	0xa2a2a200,0x4b4b004b
.long	0x8d8d8d00,0xbebe00be
.long	0xfafafa00,0x2e2e002e
.long	0x72727200,0x79790079
.long	0x07070700,0x8c8c008c
.long	0xb9b9b900,0x6e6e006e
.long	0x55555500,0x8e8e008e
.long	0xf8f8f800,0xf5f500f5
.long	0xeeeeee00,0xb6b600b6
.long	0xacacac00,0xfdfd00fd
.long	0x0a0a0a00,0x59590059
.long	0x36363600,0x98980098
.long	0x49494900,0x6a6a006a
.long	0x2a2a2a00,0x46460046
.long	0x68686800,0xbaba00ba
.long	0x3c3c3c00,0x25250025
.long	0x38383800,0x42420042
.long	0xf1f1f100,0xa2a200a2
.long	0xa4a4a400,0xfafa00fa
.long	0x40404000,0x07070007
.long	0x28282800,0x55550055
.long	0xd3d3d300,0xeeee00ee
.long	0x7b7b7b00,0x0a0a000a
.long	0xbbbbbb00,0x49490049
.long	0xc9c9c900,0x68680068
.long	0x43434300,0x38380038
.long	0xc1c1c100,0xa4a400a4
.long	0x15151500,0x28280028
.long	0xe3e3e300,0x7b7b007b
.long	0xadadad00,0xc9c900c9
.long	0xf4f4f400,0xc1c100c1
.long	0x77777700,0xe3e300e3
.long	0xc7c7c700,0xf4f400f4
.long	0x80808000,0xc7c700c7
.long	0x9e9e9e00,0x9e9e009e
.long	0x00e0e0e0,0x38003838
.long	0x00050505,0x41004141
.long	0x00585858,0x16001616
.long	0x00d9d9d9,0x76007676
.long	0x00676767,0xd900d9d9
.long	0x004e4e4e,0x93009393
.long	0x00818181,0x60006060
.long	0x00cbcbcb,0xf200f2f2
.long	0x00c9c9c9,0x72007272
.long	0x000b0b0b,0xc200c2c2
.long	0x00aeaeae,0xab00abab
.long	0x006a6a6a,0x9a009a9a
.long	0x00d5d5d5,0x75007575
.long	0x00181818,0x06000606
.long	0x005d5d5d,0x57005757
.long	0x00828282,0xa000a0a0
.long	0x00464646,0x91009191
.long	0x00dfdfdf,0xf700f7f7
.long	0x00d6d6d6,0xb500b5b5
.long	0x00272727,0xc900c9c9
.long	0x008a8a8a,0xa200a2a2
.long	0x00323232,0x8c008c8c
.long	0x004b4b4b,0xd200d2d2
.long	0x00424242,0x90009090
.long	0x00dbdbdb,0xf600f6f6
.long	0x001c1c1c,0x07000707
.long	0x009e9e9e,0xa700a7a7
.long	0x009c9c9c,0x27002727
.long	0x003a3a3a,0x8e008e8e
.long	0x00cacaca,0xb200b2b2
.long	0x00252525,0x49004949
.long	0x007b7b7b,0xde00dede
.long	0x000d0d0d,0x43004343
.long	0x00717171,0x5c005c5c
.long	0x005f5f5f,0xd700d7d7
.long	0x001f1f1f,0xc700c7c7
.long	0x00f8f8f8,0x3e003e3e
.long	0x00d7d7d7,0xf500f5f5
.long	0x003e3e3e,0x8f008f8f
.long	0x009d9d9d,0x67006767
.long	0x007c7c7c,0x1f001f1f
.long	0x00606060,0x18001818
.long	0x00b9b9b9,0x6e006e6e
.long	0x00bebebe,0xaf00afaf
.long	0x00bcbcbc,0x2f002f2f
.long	0x008b8b8b,0xe200e2e2
.long	0x00161616,0x85008585
.long	0x00343434,0x0d000d0d
.long	0x004d4d4d,0x53005353
.long	0x00c3c3c3,0xf000f0f0
.long	0x00727272,0x9c009c9c
.long	0x00959595,0x65006565
.long	0x00ababab,0xea00eaea
.long	0x008e8e8e,0xa300a3a3
.long	0x00bababa,0xae00aeae
.long	0x007a7a7a,0x9e009e9e
.long	0x00b3b3b3,0xec00ecec
.long	0x00020202,0x80008080
.long	0x00b4b4b4,0x2d002d2d
.long	0x00adadad,0x6b006b6b
.long	0x00a2a2a2,0xa800a8a8
.long	0x00acacac,0x2b002b2b
.long	0x00d8d8d8,0x36003636
.long	0x009a9a9a,0xa600a6a6
.long	0x00171717,0xc500c5c5
.long	0x001a1a1a,0x86008686
.long	0x00353535,0x4d004d4d
.long	0x00cccccc,0x33003333
.long	0x00f7f7f7,0xfd00fdfd
.long	0x00999999,0x66006666
.long	0x00616161,0x58005858
.long	0x005a5a5a,0x96009696
.long	0x00e8e8e8,0x3a003a3a
.long	0x00242424,0x09000909
.long	0x00565656,0x95009595
.long	0x00404040,0x10001010
.long	0x00e1e1e1,0x78007878
.long	0x00636363,0xd800d8d8
.long	0x00090909,0x42004242
.long	0x00333333,0xcc00cccc
.long	0x00bfbfbf,0xef00efef
.long	0x00989898,0x26002626
.long	0x00979797,0xe500e5e5
.long	0x00858585,0x61006161
.long	0x00686868,0x1a001a1a
.long	0x00fcfcfc,0x3f003f3f
.long	0x00ececec,0x3b003b3b
.long	0x000a0a0a,0x82008282
.long	0x00dadada,0xb600b6b6
.long	0x006f6f6f,0xdb00dbdb
.long	0x00535353,0xd400d4d4
.long	0x00626262,0x98009898
.long	0x00a3a3a3,0xe800e8e8
.long	0x002e2e2e,0x8b008b8b
.long	0x00080808,0x02000202
.long	0x00afafaf,0xeb00ebeb
.long	0x00282828,0x0a000a0a
.long	0x00b0b0b0,0x2c002c2c
.long	0x00747474,0x1d001d1d
.long	0x00c2c2c2,0xb000b0b0
.long	0x00bdbdbd,0x6f006f6f
.long	0x00363636,0x8d008d8d
.long	0x00222222,0x88008888
.long	0x00383838,0x0e000e0e
.long	0x00646464,0x19001919
.long	0x001e1e1e,0x87008787
.long	0x00393939,0x4e004e4e
.long	0x002c2c2c,0x0b000b0b
.long	0x00a6a6a6,0xa900a9a9
.long	0x00303030,0x0c000c0c
.long	0x00e5e5e5,0x79007979
.long	0x00444444,0x11001111
.long	0x00fdfdfd,0x7f007f7f
.long	0x00888888,0x22002222
.long	0x009f9f9f,0xe700e7e7
.long	0x00656565,0x59005959
.long	0x00878787,0xe100e1e1
.long	0x006b6b6b,0xda00dada
.long	0x00f4f4f4,0x3d003d3d
.long	0x00232323,0xc800c8c8
.long	0x00484848,0x12001212
.long	0x00101010,0x04000404
.long	0x00d1d1d1,0x74007474
.long	0x00515151,0x54005454
.long	0x00c0c0c0,0x30003030
.long	0x00f9f9f9,0x7e007e7e
.long	0x00d2d2d2,0xb400b4b4
.long	0x00a0a0a0,0x28002828
.long	0x00555555,0x55005555
.long	0x00a1a1a1,0x68006868
.long	0x00414141,0x50005050
.long	0x00fafafa,0xbe00bebe
.long	0x00434343,0xd000d0d0
.long	0x00131313,0xc400c4c4
.long	0x00c4c4c4,0x31003131
.long	0x002f2f2f,0xcb00cbcb
.long	0x00a8a8a8,0x2a002a2a
.long	0x00b6b6b6,0xad00adad
.long	0x003c3c3c,0x0f000f0f
.long	0x002b2b2b,0xca00caca
.long	0x00c1c1c1,0x70007070
.long	0x00ffffff,0xff00ffff
.long	0x00c8c8c8,0x32003232
.long	0x00a5a5a5,0x69006969
.long	0x00202020,0x08000808
.long	0x00898989,0x62006262
.long	0x00000000,0x00000000
.long	0x00909090,0x24002424
.long	0x00474747,0xd100d1d1
.long	0x00efefef,0xfb00fbfb
.long	0x00eaeaea,0xba00baba
.long	0x00b7b7b7,0xed00eded
.long	0x00151515,0x45004545
.long	0x00060606,0x81008181
.long	0x00cdcdcd,0x73007373
.long	0x00b5b5b5,0x6d006d6d
.long	0x00121212,0x84008484
.long	0x007e7e7e,0x9f009f9f
.long	0x00bbbbbb,0xee00eeee
.long	0x00292929,0x4a004a4a
.long	0x000f0f0f,0xc300c3c3
.long	0x00b8b8b8,0x2e002e2e
.long	0x00070707,0xc100c1c1
.long	0x00040404,0x01000101
.long	0x009b9b9b,0xe600e6e6
.long	0x00949494,0x25002525
.long	0x00212121,0x48004848
.long	0x00666666,0x99009999
.long	0x00e6e6e6,0xb900b9b9
.long	0x00cecece,0xb300b3b3
.long	0x00ededed,0x7b007b7b
.long	0x00e7e7e7,0xf900f9f9
.long	0x003b3b3b,0xce00cece
.long	0x00fefefe,0xbf00bfbf
.long	0x007f7f7f,0xdf00dfdf
.long	0x00c5c5c5,0x71007171
.long	0x00a4a4a4,0x29002929
.long	0x00373737,0xcd00cdcd
.long	0x00b1b1b1,0x6c006c6c
.long	0x004c4c4c,0x13001313
.long	0x00919191,0x64006464
.long	0x006e6e6e,0x9b009b9b
.long	0x008d8d8d,0x63006363
.long	0x00767676,0x9d009d9d
.long	0x00030303,0xc000c0c0
.long	0x002d2d2d,0x4b004b4b
.long	0x00dedede,0xb700b7b7
.long	0x00969696,0xa500a5a5
.long	0x00262626,0x89008989
.long	0x007d7d7d,0x5f005f5f
.long	0x00c6c6c6,0xb100b1b1
.long	0x005c5c5c,0x17001717
.long	0x00d3d3d3,0xf400f4f4
.long	0x00f2f2f2,0xbc00bcbc
.long	0x004f4f4f,0xd300d3d3
.long	0x00191919,0x46004646
.long	0x003f3f3f,0xcf00cfcf
.long	0x00dcdcdc,0x37003737
.long	0x00797979,0x5e005e5e
.long	0x001d1d1d,0x47004747
.long	0x00525252,0x94009494
.long	0x00ebebeb,0xfa00fafa
.long	0x00f3f3f3,0xfc00fcfc
.long	0x006d6d6d,0x5b005b5b
.long	0x005e5e5e,0x97009797
.long	0x00fbfbfb,0xfe00fefe
.long	0x00696969,0x5a005a5a
.long	0x00b2b2b2,0xac00acac
.long	0x00f0f0f0,0x3c003c3c
.long	0x00313131,0x4c004c4c
.long	0x000c0c0c,0x03000303
.long	0x00d4d4d4,0x35003535
.long	0x00cfcfcf,0xf300f3f3
.long	0x008c8c8c,0x23002323
.long	0x00e2e2e2,0xb800b8b8
.long	0x00757575,0x5d005d5d
.long	0x00a9a9a9,0x6a006a6a
.long	0x004a4a4a,0x92009292
.long	0x00575757,0xd500d5d5
.long	0x00848484,0x21002121
.long	0x00111111,0x44004444
.long	0x00454545,0x51005151
.long	0x001b1b1b,0xc600c6c6
.long	0x00f5f5f5,0x7d007d7d
.long	0x00e4e4e4,0x39003939
.long	0x000e0e0e,0x83008383
.long	0x00737373,0xdc00dcdc
.long	0x00aaaaaa,0xaa00aaaa
.long	0x00f1f1f1,0x7c007c7c
.long	0x00dddddd,0x77007777
.long	0x00595959,0x56005656
.long	0x00141414,0x05000505
.long	0x006c6c6c,0x1b001b1b
.long	0x00929292,0xa400a4a4
.long	0x00545454,0x15001515
.long	0x00d0d0d0,0x34003434
.long	0x00787878,0x1e001e1e
.long	0x00707070,0x1c001c1c
.long	0x00e3e3e3,0xf800f8f8
.long	0x00494949,0x52005252
.long	0x00808080,0x20002020
.long	0x00505050,0x14001414
.long	0x00a7a7a7,0xe900e9e9
.long	0x00f6f6f6,0xbd00bdbd
.long	0x00777777,0xdd00dddd
.long	0x00939393,0xe400e4e4
.long	0x00868686,0xa100a1a1
.long	0x00838383,0xe000e0e0
.long	0x002a2a2a,0x8a008a8a
.long	0x00c7c7c7,0xf100f1f1
.long	0x005b5b5b,0xd600d6d6
.long	0x00e9e9e9,0x7a007a7a
.long	0x00eeeeee,0xbb00bbbb
.long	0x008f8f8f,0xe300e3e3
.long	0x00010101,0x40004040
.long	0x003d3d3d,0x4f004f4f
.globl	Camellia_cbc_encrypt
.type	Camellia_cbc_encrypt,@function
.align	16
Camellia_cbc_encrypt:
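	/* Camellia_cbc_encrypt(in=%rdi, out=%rsi, len=%rdx, key=%rcx,
	   ivec=%r8, enc=%r9d); the grand-round count sits at offset 272
	   of the key structure */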
	cmpq	$0,%rdx
	je	.Lcbc_abort
	pushq	%rbx
	pushq	%rbp
	pushq	%r12
	pushq	%r13
	pushq	%r14
	pushq	%r15
.Lcbc_prologue:

	movq	%rsp,%rbp
	subq	$64,%rsp
	andq	$-64,%rsp
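	/* offset the frame by up to 960 bytes relative to the key schedule
	   address, apparently to keep stack and key data out of the same
	   cache sets */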



	leaq	-64-63(%rcx),%r10
	subq	%rsp,%r10
	negq	%r10
	andq	$960,%r10
	subq	%r10,%rsp


	movq	%rdi,%r12
	movq	%rsi,%r13
	movq	%r8,%rbx
	movq	%rcx,%r14
	movl	272(%rcx),%r15d

	movq	%r8,40(%rsp)
	movq	%rbp,48(%rsp)

.Lcbc_body:
	leaq	.LCamellia_SBOX@GOTPCREL(%rip),%rbp
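	/* touch all 4KB of the S-box (32 iterations x 128 bytes) to pull
	   it into the data cache before use */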

	movl	$32,%ecx
.align	4
.Lcbc_prefetch_sbox:
	movq	0(%rbp),%rax
	movq	32(%rbp),%rsi
	movq	64(%rbp),%rdi
	movq	96(%rbp),%r11
	leaq	128(%rbp),%rbp
	loop	.Lcbc_prefetch_sbox
	subq	$4096,%rbp
	shlq	$6,%r15
	movq	%rdx,%rcx
	leaq	(%r14,%r15,1),%r15

	cmpl	$0,%r9d
	je	.LCBC_DECRYPT

	andq	$-16,%rdx
	andq	$15,%rcx
	leaq	(%r12,%rdx,1),%rdx
	movq	%r14,0(%rsp)
	movq	%rdx,8(%rsp)
	movq	%rcx,16(%rsp)

	cmpq	%r12,%rdx
	movl	0(%rbx),%r8d
	movl	4(%rbx),%r9d
	movl	8(%rbx),%r10d
	movl	12(%rbx),%r11d
	je	.Lcbc_enc_tail
	jmp	.Lcbc_eloop

.align	16
.Lcbc_eloop:
	xorl	0(%r12),%r8d
	xorl	4(%r12),%r9d
	xorl	8(%r12),%r10d
	bswapl	%r8d
	xorl	12(%r12),%r11d
	bswapl	%r9d
	bswapl	%r10d
	bswapl	%r11d

	call	_x86_64_Camellia_encrypt

	movq	0(%rsp),%r14
	bswapl	%r8d
	movq	8(%rsp),%rdx
	bswapl	%r9d
	movq	16(%rsp),%rcx
	bswapl	%r10d
	movl	%r8d,0(%r13)
	bswapl	%r11d
	movl	%r9d,4(%r13)
	movl	%r10d,8(%r13)
	leaq	16(%r12),%r12
	movl	%r11d,12(%r13)
	cmpq	%rdx,%r12
	leaq	16(%r13),%r13
	jne	.Lcbc_eloop

	cmpq	$0,%rcx
	jne	.Lcbc_enc_tail

	movq	40(%rsp),%r13
	movl	%r8d,0(%r13)
	movl	%r9d,4(%r13)
	movl	%r10d,8(%r13)
	movl	%r11d,12(%r13)
	jmp	.Lcbc_done

.align	16
.Lcbc_enc_tail:
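	/* partial final block: zero a 16-byte scratch slot, copy the tail
	   bytes into it, clear the residue count and loop once more over
	   the zero-padded block */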
	xorq	%rax,%rax
	movq	%rax,0+24(%rsp)
	movq	%rax,8+24(%rsp)
	movq	%rax,16(%rsp)

.Lcbc_enc_pushf:
	pushfq
	cld
	movq	%r12,%rsi
	leaq	8+24(%rsp),%rdi
.long	0x9066A4F3		
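	/* .long 0x9066A4F3 emits "rep movsb" plus a two-byte nop, copying
	   %rcx tail bytes; the 8+24 addressing above allows for the extra
	   quad pushed by pushfq, and cld makes the copy run forward */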
	popfq
.Lcbc_enc_popf:

	leaq	24(%rsp),%r12
	leaq	16+24(%rsp),%rax
	movq	%rax,8(%rsp)
	jmp	.Lcbc_eloop		

.align	16
.LCBC_DECRYPT:
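	/* decryption walks the schedule from the top, so swap the base and
	   end pointers prepared above */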
	xchgq	%r14,%r15
	addq	$15,%rdx
	andq	$15,%rcx
	andq	$-16,%rdx
	movq	%r14,0(%rsp)
	leaq	(%r12,%rdx,1),%rdx
	movq	%rdx,8(%rsp)
	movq	%rcx,16(%rsp)

	movq	(%rbx),%rax
	movq	8(%rbx),%rbx
	jmp	.Lcbc_dloop
.align	16
.Lcbc_dloop:
	movl	0(%r12),%r8d
	movl	4(%r12),%r9d
	movl	8(%r12),%r10d
	bswapl	%r8d
	movl	12(%r12),%r11d
	bswapl	%r9d
	movq	%rax,0+24(%rsp)
	bswapl	%r10d
	movq	%rbx,8+24(%rsp)
	bswapl	%r11d

	call	_x86_64_Camellia_decrypt

	movq	0(%rsp),%r14
	movq	8(%rsp),%rdx
	movq	16(%rsp),%rcx

	bswapl	%r8d
	movq	(%r12),%rax
	bswapl	%r9d
	movq	8(%r12),%rbx
	bswapl	%r10d
	xorl	0+24(%rsp),%r8d
	bswapl	%r11d
	xorl	4+24(%rsp),%r9d
	xorl	8+24(%rsp),%r10d
	leaq	16(%r12),%r12
	xorl	12+24(%rsp),%r11d
	cmpq	%rdx,%r12
	je	.Lcbc_ddone

	movl	%r8d,0(%r13)
	movl	%r9d,4(%r13)
	movl	%r10d,8(%r13)
	movl	%r11d,12(%r13)

	leaq	16(%r13),%r13
	jmp	.Lcbc_dloop

.align	16
.Lcbc_ddone:
	movq	40(%rsp),%rdx
	cmpq	$0,%rcx
	jne	.Lcbc_dec_tail

	movl	%r8d,0(%r13)
	movl	%r9d,4(%r13)
	movl	%r10d,8(%r13)
	movl	%r11d,12(%r13)

	movq	%rax,(%rdx)
	movq	%rbx,8(%rdx)
	jmp	.Lcbc_done
.align	16
.Lcbc_dec_tail:
	movl	%r8d,0+24(%rsp)
	movl	%r9d,4+24(%rsp)
	movl	%r10d,8+24(%rsp)
	movl	%r11d,12+24(%rsp)

.Lcbc_dec_pushf:
	pushfq
	cld
	leaq	8+24(%rsp),%rsi
	leaq	(%r13),%rdi
.long	0x9066A4F3		
	popfq
.Lcbc_dec_popf:

	movq	%rax,(%rdx)
	movq	%rbx,8(%rdx)
	jmp	.Lcbc_done

.align	16
.Lcbc_done:
	movq	48(%rsp),%rcx
	movq	0(%rcx),%r15
	movq	8(%rcx),%r14
	movq	16(%rcx),%r13
	movq	24(%rcx),%r12
	movq	32(%rcx),%rbp
	movq	40(%rcx),%rbx
	leaq	48(%rcx),%rsp
.Lcbc_abort:
	.byte	0xf3,0xc3
.size	Camellia_cbc_encrypt,.-Camellia_cbc_encrypt

.byte	67,97,109,101,108,108,105,97,32,102,111,114,32,120,56,54,95,54,52,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
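/* the .byte string above reads "Camellia for x86_64 by <appro@openssl.org>" */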

File Added: src/crypto/external/bsd/openssl/lib/libcrypto/arch/x86_64/uplink-x86_64.S
.text	

.globl	OPENSSL_UplinkTable
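/*
 * OpenSSL's "uplink" shim: a table of lazily-bound entry points.  Each
 * _lazyN thunk saves the four register arguments, calls
 * OPENSSL_Uplink(table, N) so that slot N gets patched with its real
 * target, then tail-jumps through the table.  The scheme exists for
 * Windows ("applink") builds; the generator emits it unconditionally.
 */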
.type	_lazy1,@function
.align	16
_lazy1:
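	/* 0x48,0x83,0xEC,0x28 is "subq $40,%rsp"; the stores below land in
	   the caller-provided register home area above the return address,
	   per the Win64 convention this shim assumes */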
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$1,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*8(%rax)
_lazy1_end:
.size	_lazy1,.-_lazy1
.type	_lazy2,@function
.align	16
_lazy2:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$2,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*16(%rax)
_lazy2_end:
.size	_lazy2,.-_lazy2
.type	_lazy3,@function
.align	16
_lazy3:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$3,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*24(%rax)
_lazy3_end:
.size	_lazy3,.-_lazy3
.type	_lazy4,@function
.align	16
_lazy4:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$4,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*32(%rax)
_lazy4_end:
.size	_lazy4,.-_lazy4
.type	_lazy5,@function
.align	16
_lazy5:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$5,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*40(%rax)
_lazy5_end:
.size	_lazy5,.-_lazy5
.type	_lazy6,@function
.align	16
_lazy6:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$6,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*48(%rax)
_lazy6_end:
.size	_lazy6,.-_lazy6
.type	_lazy7,@function
.align	16
_lazy7:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$7,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*56(%rax)
_lazy7_end:
.size	_lazy7,.-_lazy7
.type	_lazy8,@function
.align	16
_lazy8:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$8,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*64(%rax)
_lazy8_end:
.size	_lazy8,.-_lazy8
.type	_lazy9,@function
.align	16
_lazy9:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$9,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*72(%rax)
_lazy9_end:
.size	_lazy9,.-_lazy9
.type	_lazy10,@function
.align	16
_lazy10:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$10,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*80(%rax)
_lazy10_end:
.size	_lazy10,.-_lazy10
.type	_lazy11,@function
.align	16
_lazy11:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$11,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*88(%rax)
_lazy11_end:
.size	_lazy11,.-_lazy11
.type	_lazy12,@function
.align	16
_lazy12:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$12,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*96(%rax)
_lazy12_end:
.size	_lazy12,.-_lazy12
.type	_lazy13,@function
.align	16
_lazy13:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$13,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*104(%rax)
_lazy13_end:
.size	_lazy13,.-_lazy13
.type	_lazy14,@function
.align	16
_lazy14:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$14,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*112(%rax)
_lazy14_end:
.size	_lazy14,.-_lazy14
.type	_lazy15,@function
.align	16
_lazy15:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$15,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*120(%rax)
_lazy15_end:
.size	_lazy15,.-_lazy15
.type	_lazy16,@function
.align	16
_lazy16:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$16,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*128(%rax)
_lazy16_end:
.size	_lazy16,.-_lazy16
.type	_lazy17,@function
.align	16
_lazy17:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$17,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*136(%rax)
_lazy17_end:
.size	_lazy17,.-_lazy17
.type	_lazy18,@function
.align	16
_lazy18:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$18,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*144(%rax)
_lazy18_end:
.size	_lazy18,.-_lazy18
.type	_lazy19,@function
.align	16
_lazy19:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$19,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*152(%rax)
_lazy19_end:
.size	_lazy19,.-_lazy19
.type	_lazy20,@function
.align	16
_lazy20:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$20,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*160(%rax)
_lazy20_end:
.size	_lazy20,.-_lazy20
.type	_lazy21,@function
.align	16
_lazy21:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$21,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*168(%rax)
_lazy21_end:
.size	_lazy21,.-_lazy21
.type	_lazy22,@function
.align	16
_lazy22:
.byte	0x48,0x83,0xEC,0x28	
	movq	%rcx,48(%rsp)
	movq	%rdx,56(%rsp)
	movq	%r8,64(%rsp)
	movq	%r9,72(%rsp)
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rcx
	movq	$22,%rdx
	call	OPENSSL_Uplink
	movq	48(%rsp),%rcx
	movq	56(%rsp),%rdx
	movq	64(%rsp),%r8
	movq	72(%rsp),%r9
	leaq	OPENSSL_UplinkTable@GOTPCREL(%rip),%rax
	addq	$40,%rsp
	jmp	*176(%rax)
_lazy22_end:
.size	_lazy22,.-_lazy22
.data	
OPENSSL_UplinkTable:
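/* slot 0 is the entry count; the remaining slots start out pointing at
   the lazy thunks and are replaced with real addresses on first use */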
.quad	22
.quad	_lazy1
.quad	_lazy2
.quad	_lazy3
.quad	_lazy4
.quad	_lazy5
.quad	_lazy6
.quad	_lazy7
.quad	_lazy8
.quad	_lazy9
.quad	_lazy10
.quad	_lazy11
.quad	_lazy12
.quad	_lazy13
.quad	_lazy14
.quad	_lazy15
.quad	_lazy16
.quad	_lazy17
.quad	_lazy18
.quad	_lazy19
.quad	_lazy20
.quad	_lazy21
.quad	_lazy22
.section	.pdata,"r"
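/* PE/COFF exception data: start, end and unwind-info RVAs per thunk */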
.align	4
.rva	_lazy1,_lazy1_end,_lazy_unwind_info
.rva	_lazy2,_lazy2_end,_lazy_unwind_info
.rva	_lazy3,_lazy3_end,_lazy_unwind_info
.rva	_lazy4,_lazy4_end,_lazy_unwind_info
.rva	_lazy5,_lazy5_end,_lazy_unwind_info
.rva	_lazy6,_lazy6_end,_lazy_unwind_info
.rva	_lazy7,_lazy7_end,_lazy_unwind_info
.rva	_lazy8,_lazy8_end,_lazy_unwind_info
.rva	_lazy9,_lazy9_end,_lazy_unwind_info
.rva	_lazy10,_lazy10_end,_lazy_unwind_info
.rva	_lazy11,_lazy11_end,_lazy_unwind_info
.rva	_lazy12,_lazy12_end,_lazy_unwind_info
.rva	_lazy13,_lazy13_end,_lazy_unwind_info
.rva	_lazy14,_lazy14_end,_lazy_unwind_info
.rva	_lazy15,_lazy15_end,_lazy_unwind_info
.rva	_lazy16,_lazy16_end,_lazy_unwind_info
.rva	_lazy17,_lazy17_end,_lazy_unwind_info
.rva	_lazy18,_lazy18_end,_lazy_unwind_info
.rva	_lazy19,_lazy19_end,_lazy_unwind_info
.rva	_lazy20,_lazy20_end,_lazy_unwind_info
.rva	_lazy21,_lazy21_end,_lazy_unwind_info
.rva	_lazy22,_lazy22_end,_lazy_unwind_info
.section	.xdata,"r"
.align	8
_lazy_unwind_info:
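/* UNWIND_INFO: version 1, 4-byte prologue, one unwind code:
   UWOP_ALLOC_SMALL with info 4, i.e. the 40-byte stack allocation */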
.byte	0x01,0x04,0x01,0x00
.byte	0x04,0x42,0x00,0x00