[csw-devel] SF.net SVN: gar:[20404] csw/mgar/pkg/openssl1/trunk

chninkel at users.sourceforge.net
Sun Mar 10 21:49:54 CET 2013


Revision: 20404
          http://gar.svn.sourceforge.net/gar/?rev=20404&view=rev
Author:   chninkel
Date:     2013-03-10 20:49:54 +0000 (Sun, 10 Mar 2013)
Log Message:
-----------
openssl1/trunk: updated t4 engine patch

Modified Paths:
--------------
    csw/mgar/pkg/openssl1/trunk/Makefile
    csw/mgar/pkg/openssl1/trunk/files/more_configure_targets.patch
    csw/mgar/pkg/openssl1/trunk/files/pkcs11_engine-1.0.1b.patch.2012-04-30

Added Paths:
-----------
    csw/mgar/pkg/openssl1/trunk/files/openssl-1.0.1e-t4-engine.sparc-patch

Removed Paths:
-------------
    csw/mgar/pkg/openssl1/trunk/files/openssl-1.0.1c-t4-engine.sparc-patch.2012-05-11

Modified: csw/mgar/pkg/openssl1/trunk/Makefile
===================================================================
--- csw/mgar/pkg/openssl1/trunk/Makefile	2013-03-10 16:41:29 UTC (rev 20403)
+++ csw/mgar/pkg/openssl1/trunk/Makefile	2013-03-10 20:49:54 UTC (rev 20404)
@@ -124,7 +124,7 @@
 # support for pkcs11 engine http://blogs.sun.com/chichang1/entry/how_to_integrate_pkcs11_engine
 ifdef PKCS11
 ifneq ($(shell /usr/bin/uname -r),5.9)
-        PATCHFILES += pkcs11_engine-1.0.1b.patch.2012-04-30
+       PATCHFILES += pkcs11_engine-1.0.1b.patch.2012-04-30
 endif
 endif
 
@@ -136,7 +136,7 @@
 ifeq ($(shell /usr/bin/uname -p),sparc) 
 ifneq ($(shell /usr/bin/uname -r),5.9)
 ifneq ($(shell /usr/bin/uname -r),5.10)
-	#PATCHFILES += openssl-1.0.1c-t4-engine.sparc-patch.2012-05-11
+	PATCHFILES += openssl-1.0.1e-t4-engine.sparc-patch
 endif
 endif
 endif
@@ -186,14 +186,6 @@
 endif
 
 
-ifeq ($(shell /usr/bin/uname -r),5.11)
-GARCOMPILER = SOS12U3
-else
-GARCOMPILER = SOS12
-endif
-
-
-
 # For now we want the sun perl to be used
 CONFIGURE_ENV += PERL="/usr/bin/perl"
 
@@ -246,3 +238,5 @@
 	@$(MAKECOOKIE)
 
 post-merge: merge-doc install-conf-misc 
+
+

Modified: csw/mgar/pkg/openssl1/trunk/files/more_configure_targets.patch
===================================================================
--- csw/mgar/pkg/openssl1/trunk/files/more_configure_targets.patch	2013-03-10 16:41:29 UTC (rev 20403)
+++ csw/mgar/pkg/openssl1/trunk/files/more_configure_targets.patch	2013-03-10 20:49:54 UTC (rev 20404)
@@ -1,20 +1,31 @@
-diff -ur openssl-1.0.1e.orig/Configure openssl-1.0.1e/Configure
---- openssl-1.0.1e.orig/Configure	2013-03-10 07:52:44.423621000 +0100
-+++ openssl-1.0.1e/Configure	2013-03-10 08:15:34.966266918 +0100
-@@ -257,10 +257,16 @@
- #### Solaris configs, used for OpenSSL as delivered by S11.
- "solaris-x86-cc-sunw","cc:-m32 -xO3 -xspace -Xa::-D_REENTRANT::-lsocket -lnsl -lc:BN_LLONG RC4_CHUNK DES_PTR DES_UNROLL BF_PTR:${x86_elf_asm}:dlfcn:solaris-shared:-KPIC:-m32 -G -dy -z text -zdefs -Bdirect -zignore -M/usr/lib/ld/map.pagealign -M/usr/lib/ld/map.noexdata:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
- #
+From a693531b40e1f8d707cea5f984d935358c12f14d Mon Sep 17 00:00:00 2001
+From: Yann Rouillard <yann at pleiades.fr.eu.org>
+Date: Sun, 10 Mar 2013 12:14:09 +0100
+Subject: [PATCH] More configure targets
+
+---
+ Configure | 8 ++++++++
+ 1 file changed, 8 insertions(+)
+
+diff --git a/Configure b/Configure
+index a84e9d7..6f93ad3 100755
+--- a/Configure
++++ b/Configure
+@@ -267,6 +267,14 @@ my %table=(
+ # to discard unused sections and files when linking wanboot-openssl.o
+ "solaris64-sparcv9-cc-sunw-wanboot","cc:-xtarget=ultra -m64 -Qoption cg -xregs=no%appl -xO5 -xstrconst -xdepend -xspace -xF=%all -Xa -DB_ENDIAN::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl:BN_LLONG RC4_CHUNK DES_INT DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-m64 -G -dy -z text -zdefs -Bdirect -zignore -M/usr/lib/ld/map.pagealign:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR):/usr/bin/ar rs::/64",
+ 
++#### More configure targets for Opencsw build
 +"solaris-x86-pentium_pro-cc-sunw","cc:-m32 -xarch=pentium_pro -xO3 -xspace -Xa::-D_REENTRANT::-lsocket -lnsl -lc:BN_LLONG RC4_CHUNK DES_PTR DES_UNROLL BF_PTR:${x86_elf_asm}:dlfcn:solaris-shared:-KPIC:-m32 -G -dy -z text -zdefs -Bdirect -zignore -M/usr/lib/ld/map.pagealign -M/usr/lib/ld/map.noexdata:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
 +#
- "solaris64-x86_64-cc-sunw","cc:-xO3 -m64 -xstrconst -Xa -DL_ENDIAN::-D_REENTRANT::-lsocket -lnsl -lc:SIXTY_FOUR_BIT_LONG RC4_CHUNK BF_PTR DES_PTR DES_INT DES_UNROLL:${x86_64_asm}:elf:dlfcn:solaris-shared:-KPIC:-m64 -G -dy -z text -zdefs -Bdirect -zignore -M/usr/lib/ld/map.pagealign -M/usr/lib/ld/map.noexdata:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
- #
 +"solaris-sparcv8-cc-sunw","cc:-xtarget=v8 -m32 -Qoption cg -xregs=no%appl -xO5 -xstrconst -xdepend -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -lc:BN_LLONG RC4_CHUNK_LL DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-m32 -G -dy -z text -zdefs -Bdirect -zignore -M/usr/lib/ld/map.pagealign:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
 +#
- "solaris-sparcv9-cc-sunw","cc:-xtarget=ultra -m32 -Qoption cg -xregs=no%appl -xO5 -xstrconst -xdepend -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -lc:BN_LLONG RC4_CHUNK_LL DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-m32 -G -dy -z text -zdefs -Bdirect -zignore -M/usr/lib/ld/map.pagealign:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
- #
 +"solaris-sparcv9+vis-cc-sunw","cc:-xtarget=ultra -m32 -xarch=sparcvis -Qoption cg -xregs=no%appl -xO5 -xstrconst -xdepend -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -lc:BN_LLONG RC4_CHUNK_LL DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-m32 -G -dy -z text -zdefs -Bdirect -zignore -M/usr/lib/ld/map.pagealign:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
-+#
- "solaris64-sparcv9-cc-sunw","cc:-xtarget=ultra -m64 -Qoption cg -xregs=no%appl -xO5 -xstrconst -xdepend -xspace -Xa -DB_ENDIAN::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -lc:BN_LLONG RC4_CHUNK DES_INT DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-m64 -G -dy -z text -zdefs -Bdirect -zignore -M/usr/lib/ld/map.pagealign:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR):/usr/bin/ar rs::/64",
- # Option -xF=%all instructs the compiler to place functions and data
- # variables into separate section fragments. This enables the link editor
++
++
+ #### IRIX 5.x configs
+ # -mips2 flag is added by ./config when appropriate.
+ "irix-gcc","gcc:-O3 -DTERMIOS -DB_ENDIAN::(unknown):::BN_LLONG MD2_CHAR RC4_INDEX RC4_CHAR RC4_CHUNK DES_UNROLL DES_RISC2 DES_PTR BF_PTR:${mips32_asm}:o32:dlfcn:irix-shared:::.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
+-- 
+1.8.1.4
+

Deleted: csw/mgar/pkg/openssl1/trunk/files/openssl-1.0.1c-t4-engine.sparc-patch.2012-05-11
===================================================================
--- csw/mgar/pkg/openssl1/trunk/files/openssl-1.0.1c-t4-engine.sparc-patch.2012-05-11	2013-03-10 16:41:29 UTC (rev 20403)
+++ csw/mgar/pkg/openssl1/trunk/files/openssl-1.0.1c-t4-engine.sparc-patch.2012-05-11	2013-03-10 20:49:54 UTC (rev 20404)
@@ -1,8744 +0,0 @@
-diff -uNr openssl-1.0.1c.orig/Configure openssl-1.0.1c/Configure
---- openssl-1.0.1c.orig/Configure	2012-05-11 22:28:08.972569770 +0200
-+++ openssl-1.0.1c/Configure	2012-05-11 22:19:17.459525000 +0200
-@@ -133,8 +133,8 @@
- 
- my $x86_64_asm="x86_64cpuid.o:x86_64-gcc.o x86_64-mont.o x86_64-mont5.o x86_64-gf2m.o modexp512-x86_64.o::aes-x86_64.o vpaes-x86_64.o bsaes-x86_64.o aesni-x86_64.o aesni-sha1-x86_64.o::md5-x86_64.o:sha1-x86_64.o sha256-x86_64.o sha512-x86_64.o::rc4-x86_64.o rc4-md5-x86_64.o:::wp-x86_64.o:cmll-x86_64.o cmll_misc.o:ghash-x86_64.o:";
- my $ia64_asm="ia64cpuid.o:bn-ia64.o ia64-mont.o::aes_core.o aes_cbc.o aes-ia64.o::md5-ia64.o:sha1-ia64.o sha256-ia64.o sha512-ia64.o::rc4-ia64.o rc4_skey.o:::::ghash-ia64.o::void";
--my $sparcv9_asm="sparcv9cap.o sparccpuid.o:bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o:des_enc-sparc.o fcrypt_b.o:aes_core.o aes_cbc.o aes-sparcv9.o:::sha1-sparcv9.o sha256-sparcv9.o sha512-sparcv9.o:::::::ghash-sparcv9.o::void";
--my $sparcv8_asm=":sparcv8.o:des_enc-sparc.o fcrypt_b.o:::::::::::::void";
-+my $sparcv9_asm="sparcv9cap.o sparccpuid.o:bn-sparcv9.o sparcv9-mont.o sparcv9a-mont.o:des_enc-sparc.o fcrypt_b.o t4_des.o:aes_core.o aes_cbc.o aes-sparcv9.o t4_aes.o::t4_md5.o:sha1-sparcv9.o sha256-sparcv9.o sha512-sparcv9.o t4_sha1.o t4_sha2.o:::::::ghash-sparcv9.o::void";
-+my $sparcv8_asm=":sparcv8.o:des_enc-sparc.o fcrypt_b.o t4_des.o:t4_aes.o::t4_md5.o:t4_sha1.o t4_sha2.o:::::::::void";
- my $alpha_asm="alphacpuid.o:bn_asm.o alpha-mont.o:::::sha1-alpha.o:::::::ghash-alpha.o::void";
- my $mips32_asm=":bn-mips.o::aes_cbc.o aes-mips.o:::sha1-mips.o sha256-mips.o::::::::";
- my $mips64_asm=":bn-mips.o mips-mont.o::aes_cbc.o aes-mips.o:::sha1-mips.o sha256-mips.o sha512-mips.o::::::::";
-@@ -246,9 +246,9 @@
- # SC5.0 note: Compiler common patch 107357-01 or later is required!
- "solaris-sparcv7-cc","cc:-xO5 -xstrconst -xdepend -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT::-lsocket -lnsl -ldl:BN_LLONG RC4_CHAR RC4_CHUNK DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${no_asm}:dlfcn:solaris-shared:-KPIC:-G -dy -z text:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
- "solaris-sparcv8-cc","cc:-xarch=v8 -xO5 -xstrconst -xdepend -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT::-lsocket -lnsl -ldl:BN_LLONG RC4_CHAR RC4_CHUNK DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv8_asm}:dlfcn:solaris-shared:-KPIC:-G -dy -z text:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
--"solaris-sparcv9-cc","cc:-m32 -xtarget=ultra -xarch=sparc -xO5 -xstrconst -xdepend -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -ldl:BN_LLONG RC4_CHAR RC4_CHUNK_LL DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-G -dy -z text:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
--"solaris-sparcv9+vis-cc","cc:-m32 -xtarget=ultra -xarch=sparcvis -xO5 -xstrconst -xdepend -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -ldl:BN_LLONG RC4_CHAR RC4_CHUNK_LL DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-G -dy -z text:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
--"solaris64-sparcv9-cc","cc:-m64 -xtarget=ultra -xarch=sparc -xO5 -xstrconst -xdepend -Xa -DB_ENDIAN::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -ldl:BN_LLONG RC4_CHAR RC4_CHUNK DES_INT DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-xarch=v9 -G -dy -z text:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR):/usr/ccs/bin/ar rs::/64",
-+"solaris-sparcv9-cc","cc:-m32 -xtarget=ultra -xarch=sparc -Qoption cg -xregs=no%appl -xO5 -xstrconst -xdepend -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -ldl -lsoftcrypto:BN_LLONG RC4_CHAR RC4_CHUNK_LL DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-G -dy -z text:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
-+"solaris-sparcv9+vis-cc","cc:-m32 -xtarget=ultra -xarch=sparcvis -Qoption cg -xregs=no%appl -xO5 -xstrconst -xdepend -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -ldl -lsoftcrypto:BN_LLONG RC4_CHAR RC4_CHUNK_LL DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-G -dy -z text:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
-+"solaris64-sparcv9-cc","cc:-m64 -xtarget=ultra -xarch=sparc -Qoption cg -xregs=no%appl -xO5 -xstrconst -xdepend -Xa -DB_ENDIAN::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -ldl -lsoftcrypto:BN_LLONG RC4_CHAR RC4_CHUNK DES_INT DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-xarch=v9 -G -dy -z text:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR):/usr/ccs/bin/ar rs::/64",
- ####
- "debug-solaris-sparcv8-cc","cc:-DBN_DEBUG -DREF_CHECK -DCONF_DEBUG -DBN_CTX_DEBUG -DCRYPTO_MDEBUG_ALL -xarch=v8 -g -O -xstrconst -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT::-lsocket -lnsl -ldl:BN_LLONG RC4_CHAR RC4_CHUNK DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv8_asm}:dlfcn:solaris-shared:-KPIC:-G -dy -z text:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)",
- "debug-solaris-sparcv9-cc","cc:-DBN_DEBUG -DREF_CHECK -DCONF_DEBUG -DBN_CTX_DEBUG -DCRYPTO_MDEBUG_ALL -xtarget=ultra -xarch=v8plus -g -O -xstrconst -Xa -DB_ENDIAN -DBN_DIV2W::-D_REENTRANT:ULTRASPARC:-lsocket -lnsl -ldl:BN_LLONG RC4_CHAR RC4_CHUNK_LL DES_PTR DES_RISC1 DES_UNROLL BF_PTR:${sparcv9_asm}:dlfcn:solaris-shared:-KPIC:-G -dy -z text:.so.\$(SHLIB_MAJOR).\$(SHLIB_MINOR)", 
-diff -uNr openssl-1.0.1c.orig/crypto/aes/Makefile openssl-1.0.1c/crypto/aes/Makefile
---- openssl-1.0.1c.orig/crypto/aes/Makefile	2011-11-14 21:42:21.000000000 +0100
-+++ openssl-1.0.1c/crypto/aes/Makefile	2012-05-11 22:19:17.481179000 +0200
-@@ -17,6 +17,10 @@
- ASFLAGS= $(INCLUDES) $(ASFLAG)
- AFLAGS= $(ASFLAGS)
- 
-+BITS:=	$(shell if grep '^CFLAG.*=.*-m64' ../../Makefile >/dev/null; \
-+		then echo 64; else echo 32; fi)
-+ASFLAGSYF= -xregsym=no -K pic -P -xarch=v9v -D_sparcv9 -D_ASM -Dsparc -m$(BITS)
-+
- GENERAL=Makefile
- #TEST=aestest.c
- TEST=
-@@ -69,6 +73,10 @@
- aes-sparcv9.s: asm/aes-sparcv9.pl
- 	$(PERL) asm/aes-sparcv9.pl $(CFLAGS) > $@
- 
-+t4_aes.o: asm/t4_aes.S
-+	as $(ASFLAGSYF) -o $@ asm/t4_aes.S
-+	elfedit -e 'cap:hw1 -and -cmp vis vis3' $@
-+
- aes-ppc.s:	asm/aes-ppc.pl
- 	$(PERL) asm/aes-ppc.pl $(PERLASM_SCHEME) $@
- 
-diff -uNr openssl-1.0.1c.orig/crypto/aes/asm/t4_aes.S openssl-1.0.1c/crypto/aes/asm/t4_aes.S
---- openssl-1.0.1c.orig/crypto/aes/asm/t4_aes.S	1970-01-01 01:00:00.000000000 +0100
-+++ openssl-1.0.1c/crypto/aes/asm/t4_aes.S	2012-05-11 21:34:40.438327000 +0200
-@@ -0,0 +1,3052 @@
-+/*
-+ * ====================================================================
-+ * Copyright (c) 1998-2011 The OpenSSL Project.  All rights reserved.
-+ *
-+ * Redistribution and use in source and binary forms, with or without
-+ * modification, are permitted provided that the following conditions
-+ * are met:
-+ *
-+ * 1. Redistributions of source code must retain the above copyright
-+ *    notice, this list of conditions and the following disclaimer.
-+ *
-+ * 2. Redistributions in binary form must reproduce the above copyright
-+ *    notice, this list of conditions and the following disclaimer in
-+ *    the documentation and/or other materials provided with the
-+ *    distribution.
-+ *
-+ * 3. All advertising materials mentioning features or use of this
-+ *    software must display the following acknowledgment:
-+ *    "This product includes software developed by the OpenSSL Project
-+ *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
-+ *
-+ * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
-+ *    endorse or promote products derived from this software without
-+ *    prior written permission. For written permission, please contact
-+ *    openssl-core at openssl.org.
-+ *
-+ * 5. Products derived from this software may not be called "OpenSSL"
-+ *    nor may "OpenSSL" appear in their names without prior written
-+ *    permission of the OpenSSL Project.
-+ *
-+ * 6. Redistributions of any form whatsoever must retain the following
-+ *    acknowledgment:
-+ *    "This product includes software developed by the OpenSSL Project
-+ *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
-+ *
-+ * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
-+ * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
-+ * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
-+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
-+ * OF THE POSSIBILITY OF SUCH DAMAGE.
-+ * ====================================================================
-+ */
-+
-+/*
-+ * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
-+ */
-+
-+/*LINTLIBRARY*/
-+
-+#if defined(lint) || defined(__lint)
-+
-+
-+#include <sys/types.h>
-+
-+/*ARGSUSED*/
-+void t4_aes_expand128(uint64_t *rk, const uint32_t *key)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes_expand192(uint64_t *rk, const uint32_t *key)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes_expand256(uint64_t *rk, const uint32_t *key)
-+{ return; }
-+
-+void t4_aes128_load_keys_for_encrypt(uint64_t *ks)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes192_load_keys_for_encrypt(uint64_t *ks)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes256_load_keys_for_encrypt(uint64_t *ks)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes128_ecb_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes192_ecb_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes256_ecb_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes128_cbc_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes192_cbc_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes256_cbc_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes128_ctr_crypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes192_ctr_crypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes256_ctr_crypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes128_cfb128_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes192_cfb128_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes256_cfb128_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+void t4_aes128_load_keys_for_decrypt(uint64_t *ks)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes192_load_keys_for_decrypt(uint64_t *ks)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes256_load_keys_for_decrypt(uint64_t *ks)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes128_ecb_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes192_ecb_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes256_ecb_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes128_cbc_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes192_cbc_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes256_cbc_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes128_cfb128_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes192_cfb128_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_aes256_cfb128_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t * asm_out, size_t amount_to_encrypt, uint64_t *iv)
-+{ return; }
-+
-+#else	/* lint || __lint */
-+
-+#include<sys/asm_linkage.h>
-+
-+
-+	ENTRY(t4_aes_expand128)
-+
-+!load key
-+	ld	[%o1], %f0
-+	ld	[%o1 + 0x4], %f1
-+	ld	[%o1 + 0x8], %f2
-+	ld	[%o1 + 0xc], %f3
-+
-+!expand the key
-+	!aes_kexpand1 %f0, %f2, 0x0, %f4
-+	!aes_kexpand2 %f2, %f4, %f6
-+	!aes_kexpand1 %f4, %f6, 0x1, %f8
-+	!aes_kexpand2 %f6, %f8, %f10
-+	!aes_kexpand1 %f8, %f10, 0x2, %f12
-+	!aes_kexpand2 %f10, %f12, %f14
-+	!aes_kexpand1 %f12, %f14, 0x3, %f16
-+	!aes_kexpand2 %f14, %f16, %f18
-+	!aes_kexpand1 %f16, %f18, 0x4, %f20
-+	!aes_kexpand2 %f18, %f20, %f22
-+	!aes_kexpand1 %f20, %f22, 0x5, %f24
-+	!aes_kexpand2 %f22, %f24, %f26
-+	!aes_kexpand1 %f24, %f26, 0x6, %f28
-+	!aes_kexpand2 %f26, %f28, %f30
-+	!aes_kexpand1 %f28, %f30, 0x7, %f32
-+	!aes_kexpand2 %f30, %f32, %f34
-+	!aes_kexpand1 %f32, %f34, 0x8, %f36
-+	!aes_kexpand2 %f34, %f36, %f38
-+	!aes_kexpand1 %f36, %f38, 0x9, %f40
-+	!aes_kexpand2 %f38, %f40, %f42
-+	.byte	0x88, 0xc8, 0x01, 0x02
-+	.byte	0x8d, 0xb0, 0xa6, 0x24
-+	.byte	0x90, 0xc9, 0x03, 0x06
-+	.byte	0x95, 0xb1, 0xa6, 0x28
-+	.byte	0x98, 0xca, 0x05, 0x0a
-+	.byte	0x9d, 0xb2, 0xa6, 0x2c
-+	.byte	0xa0, 0xcb, 0x07, 0x0e
-+	.byte	0xa5, 0xb3, 0xa6, 0x30
-+	.byte	0xa8, 0xcc, 0x09, 0x12
-+	.byte	0xad, 0xb4, 0xa6, 0x34
-+	.byte	0xb0, 0xcd, 0x0b, 0x16
-+	.byte	0xb5, 0xb5, 0xa6, 0x38
-+	.byte	0xb8, 0xce, 0x0d, 0x1a
-+	.byte	0xbd, 0xb6, 0xa6, 0x3c
-+	.byte	0x82, 0xcf, 0x0f, 0x1e
-+	.byte	0x87, 0xb7, 0xa6, 0x21
-+	.byte	0x8a, 0xc8, 0x51, 0x03
-+	.byte	0x8f, 0xb0, 0xe6, 0x25
-+	.byte	0x92, 0xc9, 0x53, 0x07
-+	.byte	0x97, 0xb1, 0xe6, 0x29
-+
-+!copy expanded key back into array
-+	std	%f4, [%o0]
-+	std	%f6, [%o0 + 0x8]
-+	std	%f8, [%o0 + 0x10]
-+	std	%f10, [%o0 + 0x18]
-+	std	%f12, [%o0 + 0x20]
-+	std	%f14, [%o0 + 0x28]
-+	std	%f16, [%o0 + 0x30]
-+	std	%f18, [%o0 + 0x38]
-+	std	%f20, [%o0 + 0x40]
-+	std	%f22, [%o0 + 0x48]
-+	std	%f24, [%o0 + 0x50]
-+	std	%f26, [%o0 + 0x58]
-+	std	%f28, [%o0 + 0x60]
-+	std	%f30, [%o0 + 0x68]
-+	std	%f32, [%o0 + 0x70]
-+	std	%f34, [%o0 + 0x78]
-+	std	%f36, [%o0 + 0x80]
-+	std	%f38, [%o0 + 0x88]
-+	std	%f40, [%o0 + 0x90]
-+	retl
-+	std	%f42, [%o0 + 0x98]
-+
-+	SET_SIZE(t4_aes_expand128)
-+
-+
-+	ENTRY(t4_aes_expand192)
-+
-+!load key
-+	ld	[%o1], %f0
-+	ld	[%o1 + 0x4], %f1
-+	ld	[%o1 + 0x8], %f2
-+	ld	[%o1 + 0xc], %f3
-+	ld	[%o1 + 0x10], %f4
-+	ld	[%o1 + 0x14], %f5
-+
-+!expand the key
-+	!aes_kexpand1 %f0, %f4, 0x0, %f6
-+	!aes_kexpand2 %f2, %f6, %f8
-+	!aes_kexpand2 %f4, %f8, %f10
-+
-+	!aes_kexpand1 %f6, %f10, 0x1, %f12
-+	!aes_kexpand2 %f8, %f12, %f14
-+	!aes_kexpand2 %f10, %f14, %f16
-+
-+	!aes_kexpand1 %f12, %f16, 0x2, %f18
-+	!aes_kexpand2 %f14, %f18, %f20
-+	!aes_kexpand2 %f16, %f20, %f22
-+
-+	!aes_kexpand1 %f18, %f22, 0x3, %f24
-+	!aes_kexpand2 %f20, %f24, %f26
-+	!aes_kexpand2 %f22, %f26, %f28
-+
-+	!aes_kexpand1 %f24, %f28, 0x4, %f30
-+	!aes_kexpand2 %f26, %f30, %f32
-+	!aes_kexpand2 %f28, %f32, %f34
-+
-+	!aes_kexpand1 %f30, %f34, 0x5, %f36
-+	!aes_kexpand2 %f32, %f36, %f38
-+	!aes_kexpand2 %f34, %f38, %f40
-+
-+	!aes_kexpand1 %f36, %f40, 0x6, %f42
-+	!aes_kexpand2 %f38, %f42, %f44
-+	!aes_kexpand2 %f40, %f44, %f46
-+
-+	!aes_kexpand1 %f42, %f46, 0x7, %f48
-+	!aes_kexpand2 %f44, %f48, %f50
-+	.byte	0x8c, 0xc8, 0x01, 0x04
-+	.byte	0x91, 0xb0, 0xa6, 0x26
-+	.byte	0x95, 0xb1, 0x26, 0x28
-+	.byte	0x98, 0xc9, 0x83, 0x0a
-+	.byte	0x9d, 0xb2, 0x26, 0x2c
-+	.byte	0xa1, 0xb2, 0xa6, 0x2e
-+	.byte	0xa4, 0xcb, 0x05, 0x10
-+	.byte	0xa9, 0xb3, 0xa6, 0x32
-+	.byte	0xad, 0xb4, 0x26, 0x34
-+	.byte	0xb0, 0xcc, 0x87, 0x16
-+	.byte	0xb5, 0xb5, 0x26, 0x38
-+	.byte	0xb9, 0xb5, 0xa6, 0x3a
-+	.byte	0xbc, 0xce, 0x09, 0x1c
-+	.byte	0x83, 0xb6, 0xa6, 0x3e
-+	.byte	0x87, 0xb7, 0x26, 0x21
-+	.byte	0x8a, 0xcf, 0x8b, 0x03
-+	.byte	0x8f, 0xb0, 0x66, 0x25
-+	.byte	0x93, 0xb0, 0xe6, 0x27
-+	.byte	0x96, 0xc9, 0x4d, 0x09
-+	.byte	0x9b, 0xb1, 0xe6, 0x2b
-+	.byte	0x9f, 0xb2, 0x66, 0x2d
-+	.byte	0xa2, 0xca, 0xcf, 0x0f
-+	.byte	0xa7, 0xb3, 0x66, 0x31
-+
-+!copy expanded key back into array
-+	std	%f6, [%o0]
-+	std	%f8, [%o0 + 0x8]
-+	std	%f10, [%o0 + 0x10]
-+	std	%f12, [%o0 + 0x18]
-+	std	%f14, [%o0 + 0x20]
-+	std	%f16, [%o0 + 0x28]
-+	std	%f18, [%o0 + 0x30]
-+	std	%f20, [%o0 + 0x38]
-+	std	%f22, [%o0 + 0x40]
-+	std	%f24, [%o0 + 0x48]
-+	std	%f26, [%o0 + 0x50]
-+	std	%f28, [%o0 + 0x58]
-+	std	%f30, [%o0 + 0x60]
-+	std	%f32, [%o0 + 0x68]
-+	std	%f34, [%o0 + 0x70]
-+	std	%f36, [%o0 + 0x78]
-+	std	%f38, [%o0 + 0x80]
-+	std	%f40, [%o0 + 0x88]
-+	std	%f42, [%o0 + 0x90]
-+	std	%f44, [%o0 + 0x98]
-+	std	%f46, [%o0 + 0xa0]
-+	std	%f48, [%o0 + 0xa8]
-+	retl
-+	std	%f50, [%o0 + 0xb0]
-+
-+	SET_SIZE(t4_aes_expand192)
-+
-+
-+	ENTRY(t4_aes_expand256)
-+
-+!load key
-+	ld	[%o1], %f0
-+	ld	[%o1 + 0x4], %f1
-+	ld	[%o1 + 0x8], %f2
-+	ld	[%o1 + 0xc], %f3
-+	ld	[%o1 + 0x10], %f4
-+	ld	[%o1 + 0x14], %f5
-+	ld	[%o1 + 0x18], %f6
-+	ld	[%o1 + 0x1c], %f7
-+
-+!expand the key
-+	!aes_kexpand1 %f0, %f6, 0x0, %f8
-+	!aes_kexpand2 %f2, %f8, %f10
-+	!aes_kexpand0 %f4, %f10, %f12
-+	!aes_kexpand2 %f6, %f12, %f14
-+
-+	!aes_kexpand1 %f8, %f14, 0x1, %f16
-+	!aes_kexpand2 %f10, %f16, %f18
-+	!aes_kexpand0 %f12, %f18, %f20
-+	!aes_kexpand2 %f14, %f20, %f22
-+
-+	!aes_kexpand1 %f16, %f22, 0x2, %f24
-+	!aes_kexpand2 %f18, %f24, %f26
-+	!aes_kexpand0 %f20, %f26, %f28
-+	!aes_kexpand2 %f22, %f28, %f30
-+
-+	!aes_kexpand1 %f24, %f30, 0x3, %f32
-+	!aes_kexpand2 %f26, %f32, %f34
-+	!aes_kexpand0 %f28, %f34, %f36
-+	!aes_kexpand2 %f30, %f36, %f38
-+
-+	!aes_kexpand1 %f32, %f38, 0x4, %f40
-+	!aes_kexpand2 %f34, %f40, %f42
-+	!aes_kexpand0 %f36, %f42, %f44
-+	!aes_kexpand2 %f38, %f44, %f46
-+
-+	!aes_kexpand1 %f40, %f46, 0x5, %f48
-+	!aes_kexpand2 %f42, %f48, %f50
-+	!aes_kexpand0 %f44, %f50, %f52
-+	!aes_kexpand2 %f46, %f52, %f54
-+
-+	!aes_kexpand1 %f48, %f54, 0x6, %f56
-+	!aes_kexpand2 %f50, %f56, %f58
-+	.byte	0x90, 0xc8, 0x01, 0x06
-+	.byte	0x95, 0xb0, 0xa6, 0x28
-+	.byte	0x99, 0xb1, 0x26, 0x0a
-+	.byte	0x9d, 0xb1, 0xa6, 0x2c
-+	.byte	0xa0, 0xca, 0x03, 0x0e
-+	.byte	0xa5, 0xb2, 0xa6, 0x30
-+	.byte	0xa9, 0xb3, 0x26, 0x12
-+	.byte	0xad, 0xb3, 0xa6, 0x34
-+	.byte	0xb0, 0xcc, 0x05, 0x16
-+	.byte	0xb5, 0xb4, 0xa6, 0x38
-+	.byte	0xb9, 0xb5, 0x26, 0x1a
-+	.byte	0xbd, 0xb5, 0xa6, 0x3c
-+	.byte	0x82, 0xce, 0x07, 0x1e
-+	.byte	0x87, 0xb6, 0xa6, 0x21
-+	.byte	0x8b, 0xb7, 0x26, 0x03
-+	.byte	0x8f, 0xb7, 0xa6, 0x25
-+	.byte	0x92, 0xc8, 0x49, 0x07
-+	.byte	0x97, 0xb0, 0xe6, 0x29
-+	.byte	0x9b, 0xb1, 0x66, 0x0b
-+	.byte	0x9f, 0xb1, 0xe6, 0x2d
-+	.byte	0xa2, 0xca, 0x4b, 0x0f
-+	.byte	0xa7, 0xb2, 0xe6, 0x31
-+	.byte	0xab, 0xb3, 0x66, 0x13
-+	.byte	0xaf, 0xb3, 0xe6, 0x35
-+	.byte	0xb2, 0xcc, 0x4d, 0x17
-+	.byte	0xb7, 0xb4, 0xe6, 0x39
-+
-+!copy expanded key back into array
-+	std	%f8, [%o0]
-+	std	%f10, [%o0 + 0x8]
-+	std	%f12, [%o0 + 0x10]
-+	std	%f14, [%o0 + 0x18]
-+	std	%f16, [%o0 + 0x20]
-+	std	%f18, [%o0 + 0x28]
-+	std	%f20, [%o0 + 0x30]
-+	std	%f22, [%o0 + 0x38]
-+	std	%f24, [%o0 + 0x40]
-+	std	%f26, [%o0 + 0x48]
-+	std	%f28, [%o0 + 0x50]
-+	std	%f30, [%o0 + 0x58]
-+	std	%f32, [%o0 + 0x60]
-+	std	%f34, [%o0 + 0x68]
-+	std	%f36, [%o0 + 0x70]
-+	std	%f38, [%o0 + 0x78]
-+	std	%f40, [%o0 + 0x80]
-+	std	%f42, [%o0 + 0x88]
-+	std	%f44, [%o0 + 0x90]
-+	std	%f46, [%o0 + 0x98]
-+	std	%f48, [%o0 + 0xa0]
-+	std	%f50, [%o0 + 0xa8]
-+	std	%f52, [%o0 + 0xb0]
-+	std	%f54, [%o0 + 0xb8]
-+	std	%f56, [%o0 + 0xc0]
-+	retl
-+	std	%f58, [%o0 + 0xc8]
-+
-+	SET_SIZE(t4_aes_expand256)
-+
-+
-+#define	FIRST_TWO_EROUNDS \
-+	.byte	0xb2, 0xc8, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xc8, 0xbe, 0x3d ; \
-+	.byte	0xba, 0xc9, 0x36, 0x19 ; \
-+	.byte	0xbe, 0xc9, 0xb6, 0x39
-+	!aes_eround01	%f0, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f2, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f4, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f6, %f56, %f58, %f62
-+
-+#define	MID_TWO_EROUNDS \
-+	.byte	0xb2, 0xca, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xca, 0xbe, 0x3d ; \
-+	.byte	0xba, 0xcb, 0x36, 0x19 ; \
-+	.byte	0xbe, 0xcb, 0xb6, 0x39
-+	!aes_eround01	%f8, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f10, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f12, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f14, %f56, %f58, %f62
-+
-+#define	MID_TWO_EROUNDS_2 \
-+	.byte	0x8c, 0xca, 0x04, 0x00 ; \
-+	.byte	0x88, 0xca, 0x84, 0x20 ; \
-+	.byte	0xb2, 0xca, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xca, 0xbe, 0x3d ; \
-+	.byte	0x80, 0xcb, 0x08, 0x06 ; \
-+	.byte	0x84, 0xcb, 0x88, 0x26 ; \
-+	.byte	0xba, 0xcb, 0x36, 0x19 ; \
-+	.byte	0xbe, 0xcb, 0xb6, 0x39
-+	!aes_eround01	%f8, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f10, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f8, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f10, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f12, %f6, %f4, %f0 ; \
-+	!aes_eround23	%f14, %f6, %f4, %f2 ; \
-+	!aes_eround01	%f12, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f14, %f56, %f58, %f62
-+
-+#define	TEN_EROUNDS \
-+	.byte	0xb2, 0xcc, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xcc, 0xbe, 0x3d ; \
-+	.byte	0xba, 0xcd, 0x36, 0x19 ; \
-+	.byte	0xbe, 0xcd, 0xb6, 0x39 ; \
-+	.byte	0xb2, 0xce, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xce, 0xbe, 0x3d ; \
-+	.byte	0xba, 0xcf, 0x36, 0x19 ; \
-+	.byte	0xbe, 0xcf, 0xb6, 0x39 ; \
-+	.byte	0xb2, 0xc8, 0x7e, 0x1d ; \
-+	.byte	0xb6, 0xc8, 0xfe, 0x3d ; \
-+	.byte	0xba, 0xc9, 0x76, 0x19 ; \
-+	.byte	0xbe, 0xc9, 0xf6, 0x39 ; \
-+	.byte	0xb2, 0xca, 0x7e, 0x1d ; \
-+	.byte	0xb6, 0xca, 0xfe, 0x3d ; \
-+	.byte	0xba, 0xcb, 0x76, 0x19 ; \
-+	.byte	0xbe, 0xcb, 0xf6, 0x39 ; \
-+	.byte	0xb2, 0xcc, 0x7e, 0x1d ; \
-+	.byte	0xb6, 0xcc, 0xfe, 0x3d ; \
-+	.byte	0xba, 0xcd, 0x76, 0x99 ; \
-+	.byte	0xbe, 0xcd, 0xf6, 0xb9
-+	!aes_eround01	%f16, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f18, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f20, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f22, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f24, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f26, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f28, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f30, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f32, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f34, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f36, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f38, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f40, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f42, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f44, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f46, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f48, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f50, %f60, %f62, %f58 ; \
-+	!aes_eround01_l	%f52, %f56, %f58, %f60 ; \
-+	!aes_eround23_l	%f54, %f56, %f58, %f62
-+
-+#define	TEN_EROUNDS_2 \
-+	.byte	0x8c, 0xcc, 0x04, 0x00 ; \
-+	.byte	0x88, 0xcc, 0x84, 0x20 ; \
-+	.byte	0xb2, 0xcc, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xcc, 0xbe, 0x3d ; \
-+	.byte	0x80, 0xcd, 0x08, 0x06 ; \
-+	.byte	0x84, 0xcd, 0x88, 0x26 ; \
-+	.byte	0xba, 0xcd, 0x36, 0x19 ; \
-+	.byte	0xbe, 0xcd, 0xb6, 0x39 ; \
-+	.byte	0x8c, 0xce, 0x04, 0x00 ; \
-+	.byte	0x88, 0xce, 0x84, 0x20 ; \
-+	.byte	0xb2, 0xce, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xce, 0xbe, 0x3d ; \
-+	.byte	0x80, 0xcf, 0x08, 0x06 ; \
-+	.byte	0x84, 0xcf, 0x88, 0x26 ; \
-+	.byte	0xba, 0xcf, 0x36, 0x19 ; \
-+	.byte	0xbe, 0xcf, 0xb6, 0x39 ; \
-+	.byte	0x8c, 0xc8, 0x44, 0x00 ; \
-+	.byte	0x88, 0xc8, 0xc4, 0x20 ; \
-+	.byte	0xb2, 0xc8, 0x7e, 0x1d ; \
-+	.byte	0xb6, 0xc8, 0xfe, 0x3d ; \
-+	.byte	0x80, 0xc9, 0x48, 0x06 ; \
-+	.byte	0x84, 0xc9, 0xc8, 0x26 ; \
-+	.byte	0xba, 0xc9, 0x76, 0x19 ; \
-+	.byte	0xbe, 0xc9, 0xf6, 0x39 ; \
-+	.byte	0x8c, 0xca, 0x44, 0x00 ; \
-+	.byte	0x88, 0xca, 0xc4, 0x20 ; \
-+	.byte	0xb2, 0xca, 0x7e, 0x1d ; \
-+	.byte	0xb6, 0xca, 0xfe, 0x3d ; \
-+	.byte	0x80, 0xcb, 0x48, 0x06 ; \
-+	.byte	0x84, 0xcb, 0xc8, 0x26 ; \
-+	.byte	0xba, 0xcb, 0x76, 0x19 ; \
-+	.byte	0xbe, 0xcb, 0xf6, 0x39 ; \
-+	.byte	0x8c, 0xcc, 0x44, 0x00 ; \
-+	.byte	0x88, 0xcc, 0xc4, 0x20 ; \
-+	.byte	0xb2, 0xcc, 0x7e, 0x1d ; \
-+	.byte	0xb6, 0xcc, 0xfe, 0x3d ; \
-+	.byte	0x80, 0xcd, 0x48, 0x86 ; \
-+	.byte	0x84, 0xcd, 0xc8, 0xa6 ; \
-+	.byte	0xba, 0xcd, 0x76, 0x99 ; \
-+	.byte	0xbe, 0xcd, 0xf6, 0xb9
-+	!aes_eround01	%f16, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f18, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f16, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f18, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f20, %f6, %f4, %f0 ; \
-+	!aes_eround23	%f22, %f6, %f4, %f2 ; \
-+	!aes_eround01	%f20, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f22, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f24, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f26, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f24, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f26, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f28, %f6, %f4, %f0 ; \
-+	!aes_eround23	%f30, %f6, %f4, %f2 ; \
-+	!aes_eround01	%f28, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f30, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f32, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f34, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f32, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f34, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f36, %f6, %f4, %f0 ; \
-+	!aes_eround23	%f38, %f6, %f4, %f2 ; \
-+	!aes_eround01	%f36, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f38, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f40, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f42, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f40, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f42, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f44, %f6, %f4, %f0 ; \
-+	!aes_eround23	%f46, %f6, %f4, %f2 ; \
-+	!aes_eround01	%f44, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f46, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f48, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f50, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f48, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f50, %f60, %f62, %f58 ; \
-+	!aes_eround01_l	%f52, %f6, %f4, %f0 ; \
-+	!aes_eround23_l	%f54, %f6, %f4, %f2 ; \
-+	!aes_eround01_l	%f52, %f56, %f58, %f60 ; \
-+	!aes_eround23_l	%f54, %f56, %f58, %f62
-+
-+#define	TWELVE_EROUNDS \
-+	MID_TWO_EROUNDS	; \
-+	TEN_EROUNDS
-+
-+#define	TWELVE_EROUNDS_2 \
-+	MID_TWO_EROUNDS_2	; \
-+	TEN_EROUNDS_2
-+
-+#define	FOURTEEN_EROUNDS \
-+	FIRST_TWO_EROUNDS ; \
-+	TWELVE_EROUNDS
-+
-+#define	FOURTEEN_EROUNDS_2 \
-+	.byte	0xb0, 0xc8, 0x2c, 0x14 ; \
-+	.byte	0xac, 0xc8, 0xac, 0x34 ; \
-+	ldd	[%o0 + 0x60], %f20 ; \
-+	.byte	0xb2, 0xc8, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xc8, 0xbe, 0x3d ; \
-+	.byte	0x80, 0xc9, 0x2c, 0x18 ; \
-+	.byte	0x84, 0xc9, 0xac, 0x38 ;\
-+	ldd	[%o0 + 0x68], %f22 ; \
-+	.byte	0xba, 0xc9, 0x36, 0x19 ; \
-+	ldd	[%o0 + 0x70], %f24 ; \
-+	.byte	0xbe, 0xc9, 0xb6, 0x39 ; \
-+	.byte	0x8c, 0xca, 0x04, 0x00 ; \
-+	.byte	0x88, 0xca, 0x84, 0x20 ; \
-+	.byte	0xb2, 0xca, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xca, 0xbe, 0x3d ; \
-+	.byte	0x80, 0xcb, 0x08, 0x06 ; \
-+	.byte	0x84, 0xcb, 0x88, 0x26 ; \
-+	.byte	0xba, 0xcb, 0x36, 0x19 ; \
-+	.byte	0xbe, 0xcb, 0xb6, 0x39 ; \
-+	.byte	0x8c, 0xcc, 0x04, 0x00 ; \
-+	.byte	0x88, 0xcc, 0x84, 0x20 ; \
-+	.byte	0xb2, 0xcc, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xcc, 0xbe, 0x3d ; \
-+	.byte	0x80, 0xcd, 0x08, 0x06 ; \
-+	.byte	0x84, 0xcd, 0x88, 0x26 ; \
-+	.byte	0xba, 0xcd, 0x36, 0x19 ; \
-+	.byte	0xbe, 0xcd, 0xb6, 0x39 ; \
-+	.byte	0x8c, 0xce, 0x04, 0x00 ; \
-+	.byte	0x88, 0xce, 0x84, 0x20 ; \
-+	.byte	0xb2, 0xce, 0x3e, 0x1d ; \
-+	.byte	0xb6, 0xce, 0xbe, 0x3d ; \
-+	.byte	0x80, 0xcf, 0x08, 0x06 ; \
-+	.byte	0x84, 0xcf, 0x88, 0x26 ; \
-+	.byte	0xba, 0xcf, 0x36, 0x19 ; \
-+	.byte	0xbe, 0xcf, 0xb6, 0x39 ; \
-+	.byte	0x8c, 0xc8, 0x44, 0x00 ; \
-+	.byte	0x88, 0xc8, 0xc4, 0x20 ; \
-+	.byte	0xb2, 0xc8, 0x7e, 0x1d ; \
-+	.byte	0xb6, 0xc8, 0xfe, 0x3d ; \
-+	.byte	0x80, 0xc9, 0x48, 0x06 ; \
-+	.byte	0x84, 0xc9, 0xc8, 0x26 ; \
-+	.byte	0xba, 0xc9, 0x76, 0x19 ; \
-+	.byte	0xbe, 0xc9, 0xf6, 0x39 ; \
-+	.byte	0x8c, 0xca, 0x44, 0x00 ; \
-+	.byte	0x88, 0xca, 0xc4, 0x20 ; \
-+	.byte	0xb2, 0xca, 0x7e, 0x1d ; \
-+	.byte	0xb6, 0xca, 0xfe, 0x3d ; \
-+	.byte	0x80, 0xcb, 0x48, 0x06 ; \
-+	.byte	0x84, 0xcb, 0xc8, 0x26 ; \
-+	.byte	0xba, 0xcb, 0x76, 0x19 ; \
-+	.byte	0xbe, 0xcb, 0xf6, 0x39 ; \
-+	.byte	0x8c, 0xcc, 0x44, 0x00 ; \
-+	.byte	0x88, 0xcc, 0xc4, 0x20 ; \
-+	ldd	[%o0 + 0x10], %f0 ; \
-+	.byte	0xb2, 0xcc, 0x7e, 0x1d ; \
-+	ldd	[%o0 + 0x18], %f2 ; \
-+	.byte	0xb6, 0xcc, 0xfe, 0x3d ; \
-+	.byte	0xa8, 0xcd, 0x48, 0x86 ; \
-+	.byte	0xac, 0xcd, 0xc8, 0xa6 ; \
-+	ldd	[%o0 + 0x20], %f4 ; \
-+	.byte	0xba, 0xcd, 0x76, 0x99 ; \
-+	ldd	[%o0 + 0x28], %f6 ; \
-+	.byte	0xbe, 0xcd, 0xf6, 0xb9
-+	!aes_eround01	%f0, %f20, %f22, %f24 ; \
-+	!aes_eround23	%f2, %f20, %f22, %f22 ; \
-+	!ldd	[%o0 + 0x60], %f20 ; \
-+	!aes_eround01	%f0, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f2, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f4, %f24, %f22, %f0 ; \
-+	!aes_eround23	%f6, %f24, %f22, %f2 ; \
-+	!ldd	[%o0 + 0x68], %f22 ; \
-+	!aes_eround01	%f4, %f56, %f58, %f60 ; \
-+	!ldd	[%o0 + 0x70], %f24 ; \
-+	!aes_eround23	%f6, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f8, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f10, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f8, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f10, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f12, %f6, %f4, %f0 ; \
-+	!aes_eround23	%f14, %f6, %f4, %f2 ; \
-+	!aes_eround01	%f12, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f14, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f16, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f18, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f16, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f18, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f20, %f6, %f4, %f0 ; \
-+	!aes_eround23	%f22, %f6, %f4, %f2 ; \
-+	!aes_eround01	%f20, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f22, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f24, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f26, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f24, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f26, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f28, %f6, %f4, %f0 ; \
-+	!aes_eround23	%f30, %f6, %f4, %f2 ; \
-+	!aes_eround01	%f28, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f30, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f32, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f34, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f32, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f34, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f36, %f6, %f4, %f0 ; \
-+	!aes_eround23	%f38, %f6, %f4, %f2 ; \
-+	!aes_eround01	%f36, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f38, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f40, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f42, %f0, %f2, %f4 ; \
-+	!aes_eround01	%f40, %f60, %f62, %f56 ; \
-+	!aes_eround23	%f42, %f60, %f62, %f58 ; \
-+	!aes_eround01	%f44, %f6, %f4, %f0 ; \
-+	!aes_eround23	%f46, %f6, %f4, %f2 ; \
-+	!aes_eround01	%f44, %f56, %f58, %f60 ; \
-+	!aes_eround23	%f46, %f56, %f58, %f62 ; \
-+	!aes_eround01	%f48, %f0, %f2, %f6 ; \
-+	!aes_eround23	%f50, %f0, %f2, %f4 ; \
-+	!ldd	[%o0 + 0x10], %f0 ; \
-+	!aes_eround01	%f48, %f60, %f62, %f56 ; \
-+	!ldd	[%o0 + 0x18], %f2 ; \
-+	!aes_eround23	%f50, %f60, %f62, %f58 ; \
-+	!aes_eround01_l	%f52, %f6, %f4, %f20 ; \
-+	!aes_eround23_l	%f54, %f6, %f4, %f22 ; \
-+	!ldd	[%o0 + 0x20], %f4 ; \
-+	!aes_eround01_l	%f52, %f56, %f58, %f60 ; \
-+	!ldd	[%o0 + 0x28], %f6 ; \
-+	!aes_eround23_l	%f54, %f56, %f58, %f62
-+
-+#define	FIRST_TWO_DROUNDS \
-+	.byte	0xb2, 0xc8, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xc8, 0xbe, 0x7d ; \
-+	.byte	0xba, 0xc9, 0x36, 0x59 ; \
-+	.byte	0xbe, 0xc9, 0xb6, 0x79
-+	!aes_dround01	%f0, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f2, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f4, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f6, %f56, %f58, %f62
-+
-+#define	MID_TWO_DROUNDS \
-+	.byte	0xb2, 0xca, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xca, 0xbe, 0x7d ; \
-+	.byte	0xba, 0xcb, 0x36, 0x59 ; \
-+	.byte	0xbe, 0xcb, 0xb6, 0x79
-+	!aes_dround01	%f8, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f10, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f12, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f14, %f56, %f58, %f62
-+
-+#define	MID_TWO_DROUNDS_2 \
-+	.byte	0x8c, 0xca, 0x04, 0x40 ; \
-+	.byte	0x88, 0xca, 0x84, 0x60 ; \
-+	.byte	0xb2, 0xca, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xca, 0xbe, 0x7d ; \
-+	.byte	0x80, 0xcb, 0x08, 0x46 ; \
-+	.byte	0x84, 0xcb, 0x88, 0x66 ; \
-+	.byte	0xba, 0xcb, 0x36, 0x59 ; \
-+	.byte	0xbe, 0xcb, 0xb6, 0x79
-+	!aes_dround01	%f8, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f10, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f8, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f10, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f12, %f6, %f4, %f0 ; \
-+	!aes_dround23	%f14, %f6, %f4, %f2 ; \
-+	!aes_dround01	%f12, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f14, %f56, %f58, %f62
-+
-+#define	TEN_DROUNDS \
-+	.byte	0xb2, 0xcc, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xcc, 0xbe, 0x7d ; \
-+	.byte	0xba, 0xcd, 0x36, 0x59 ; \
-+	.byte	0xbe, 0xcd, 0xb6, 0x79 ; \
-+	.byte	0xb2, 0xce, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xce, 0xbe, 0x7d ; \
-+	.byte	0xba, 0xcf, 0x36, 0x59 ; \
-+	.byte	0xbe, 0xcf, 0xb6, 0x79 ; \
-+	.byte	0xb2, 0xc8, 0x7e, 0x5d ; \
-+	.byte	0xb6, 0xc8, 0xfe, 0x7d ; \
-+	.byte	0xba, 0xc9, 0x76, 0x59 ; \
-+	.byte	0xbe, 0xc9, 0xf6, 0x79 ; \
-+	.byte	0xb2, 0xca, 0x7e, 0x5d ; \
-+	.byte	0xb6, 0xca, 0xfe, 0x7d ; \
-+	.byte	0xba, 0xcb, 0x76, 0x59 ; \
-+	.byte	0xbe, 0xcb, 0xf6, 0x79 ; \
-+	.byte	0xb2, 0xcc, 0x7e, 0x5d ; \
-+	.byte	0xb6, 0xcc, 0xfe, 0x7d ; \
-+	.byte	0xba, 0xcd, 0x76, 0xd9 ; \
-+	.byte	0xbe, 0xcd, 0xf6, 0xf9
-+	!aes_dround01	%f16, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f18, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f20, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f22, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f24, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f26, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f28, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f30, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f32, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f34, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f36, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f38, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f40, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f42, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f44, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f46, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f48, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f50, %f60, %f62, %f58 ; \
-+	!aes_dround01_l	%f52, %f56, %f58, %f60 ; \
-+	!aes_dround23_l	%f54, %f56, %f58, %f62
-+
-+#define	TEN_DROUNDS_2 \
-+	.byte	0x8c, 0xcc, 0x04, 0x40 ; \
-+	.byte	0x88, 0xcc, 0x84, 0x60 ; \
-+	.byte	0xb2, 0xcc, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xcc, 0xbe, 0x7d ; \
-+	.byte	0x80, 0xcd, 0x08, 0x46 ; \
-+	.byte	0x84, 0xcd, 0x88, 0x66 ; \
-+	.byte	0xba, 0xcd, 0x36, 0x59 ; \
-+	.byte	0xbe, 0xcd, 0xb6, 0x79 ; \
-+	.byte	0x8c, 0xce, 0x04, 0x40 ; \
-+	.byte	0x88, 0xce, 0x84, 0x60 ; \
-+	.byte	0xb2, 0xce, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xce, 0xbe, 0x7d ; \
-+	.byte	0x80, 0xcf, 0x08, 0x46 ; \
-+	.byte	0x84, 0xcf, 0x88, 0x66 ; \
-+	.byte	0xba, 0xcf, 0x36, 0x59 ; \
-+	.byte	0xbe, 0xcf, 0xb6, 0x79 ; \
-+	.byte	0x8c, 0xc8, 0x44, 0x40 ; \
-+	.byte	0x88, 0xc8, 0xc4, 0x60 ; \
-+	.byte	0xb2, 0xc8, 0x7e, 0x5d ; \
-+	.byte	0xb6, 0xc8, 0xfe, 0x7d ; \
-+	.byte	0x80, 0xc9, 0x48, 0x46 ; \
-+	.byte	0x84, 0xc9, 0xc8, 0x66 ; \
-+	.byte	0xba, 0xc9, 0x76, 0x59 ; \
-+	.byte	0xbe, 0xc9, 0xf6, 0x79 ; \
-+	.byte	0x8c, 0xca, 0x44, 0x40 ; \
-+	.byte	0x88, 0xca, 0xc4, 0x60 ; \
-+	.byte	0xb2, 0xca, 0x7e, 0x5d ; \
-+	.byte	0xb6, 0xca, 0xfe, 0x7d ; \
-+	.byte	0x80, 0xcb, 0x48, 0x46 ; \
-+	.byte	0x84, 0xcb, 0xc8, 0x66 ; \
-+	.byte	0xba, 0xcb, 0x76, 0x59 ; \
-+	.byte	0xbe, 0xcb, 0xf6, 0x79 ; \
-+	.byte	0x8c, 0xcc, 0x44, 0x40 ; \
-+	.byte	0x88, 0xcc, 0xc4, 0x60 ; \
-+	.byte	0xb2, 0xcc, 0x7e, 0x5d ; \
-+	.byte	0xb6, 0xcc, 0xfe, 0x7d ; \
-+	.byte	0x80, 0xcd, 0x48, 0xc6 ; \
-+	.byte	0x84, 0xcd, 0xc8, 0xe6 ; \
-+	.byte	0xba, 0xcd, 0x76, 0xd9 ; \
-+	.byte	0xbe, 0xcd, 0xf6, 0xf9
-+	!aes_dround01	%f16, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f18, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f16, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f18, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f20, %f6, %f4, %f0 ; \
-+	!aes_dround23	%f22, %f6, %f4, %f2 ; \
-+	!aes_dround01	%f20, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f22, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f24, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f26, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f24, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f26, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f28, %f6, %f4, %f0 ; \
-+	!aes_dround23	%f30, %f6, %f4, %f2 ; \
-+	!aes_dround01	%f28, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f30, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f32, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f34, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f32, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f34, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f36, %f6, %f4, %f0 ; \
-+	!aes_dround23	%f38, %f6, %f4, %f2 ; \
-+	!aes_dround01	%f36, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f38, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f40, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f42, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f40, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f42, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f44, %f6, %f4, %f0 ; \
-+	!aes_dround23	%f46, %f6, %f4, %f2 ; \
-+	!aes_dround01	%f44, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f46, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f48, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f50, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f48, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f50, %f60, %f62, %f58 ; \
-+	!aes_dround01_l	%f52, %f6, %f4, %f0 ; \
-+	!aes_dround23_l	%f54, %f6, %f4, %f2 ; \
-+	!aes_dround01_l	%f52, %f56, %f58, %f60 ; \
-+	!aes_dround23_l	%f54, %f56, %f58, %f62
-+
-+#define	TWELVE_DROUNDS \
-+	MID_TWO_DROUNDS	; \
-+	TEN_DROUNDS
-+
-+#define	TWELVE_DROUNDS_2 \
-+	MID_TWO_DROUNDS_2	; \
-+	TEN_DROUNDS_2
-+
-+#define	FOURTEEN_DROUNDS \
-+	FIRST_TWO_DROUNDS ; \
-+	TWELVE_DROUNDS
-+
-+#define	FOURTEEN_DROUNDS_2 \
-+	.byte	0xb0, 0xc8, 0x2c, 0x54 ; \
-+	.byte	0xac, 0xc8, 0xac, 0x74 ; \
-+	ldd	[%o0 + 0x80], %f20 ; \
-+	.byte	0xb2, 0xc8, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xc8, 0xbe, 0x7d ; \
-+	.byte	0x80, 0xc9, 0x2c, 0x58 ; \
-+	.byte	0x84, 0xc9, 0xac, 0x78 ; \
-+	ldd	[%o0 + 0x88], %f22 ; \
-+	.byte	0xba, 0xc9, 0x36, 0x59 ; \
-+	ldd	[%o0 + 0x70], %f24 ; \
-+	.byte	0xbe, 0xc9, 0xb6, 0x79 ; \
-+	.byte	0x8c, 0xca, 0x04, 0x40 ; \
-+	.byte	0x88, 0xca, 0x84, 0x60 ; \
-+	.byte	0xb2, 0xca, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xca, 0xbe, 0x7d ; \
-+	.byte	0x80, 0xcb, 0x08, 0x46 ; \
-+	.byte	0x84, 0xcb, 0x88, 0x66 ; \
-+	.byte	0xba, 0xcb, 0x36, 0x59 ; \
-+	.byte	0xbe, 0xcb, 0xb6, 0x79 ; \
-+	.byte	0x8c, 0xcc, 0x04, 0x40 ; \
-+	.byte	0x88, 0xcc, 0x84, 0x60 ; \
-+	.byte	0xb2, 0xcc, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xcc, 0xbe, 0x7d ; \
-+	.byte	0x80, 0xcd, 0x08, 0x46 ; \
-+	.byte	0x84, 0xcd, 0x88, 0x66 ; \
-+	.byte	0xba, 0xcd, 0x36, 0x59 ; \
-+	.byte	0xbe, 0xcd, 0xb6, 0x79 ; \
-+	.byte	0x8c, 0xce, 0x04, 0x40 ; \
-+	.byte	0x88, 0xce, 0x84, 0x60 ; \
-+	.byte	0xb2, 0xce, 0x3e, 0x5d ; \
-+	.byte	0xb6, 0xce, 0xbe, 0x7d ; \
-+	.byte	0x80, 0xcf, 0x08, 0x46 ; \
-+	.byte	0x84, 0xcf, 0x88, 0x66 ; \
-+	.byte	0xba, 0xcf, 0x36, 0x59 ; \
-+	.byte	0xbe, 0xcf, 0xb6, 0x79 ; \
-+	.byte	0x8c, 0xc8, 0x44, 0x40 ; \
-+	.byte	0x88, 0xc8, 0xc4, 0x60 ; \
-+	.byte	0xb2, 0xc8, 0x7e, 0x5d ; \
-+	.byte	0xb6, 0xc8, 0xfe, 0x7d ; \
-+	.byte	0x80, 0xc9, 0x48, 0x46 ; \
-+	.byte	0x84, 0xc9, 0xc8, 0x66 ; \
-+	.byte	0xba, 0xc9, 0x76, 0x59 ; \
-+	.byte	0xbe, 0xc9, 0xf6, 0x79 ; \
-+	.byte	0x8c, 0xca, 0x44, 0x40 ; \
-+	.byte	0x88, 0xca, 0xc4, 0x60 ; \
-+	.byte	0xb2, 0xca, 0x7e, 0x5d ; \
-+	.byte	0xb6, 0xca, 0xfe, 0x7d ; \
-+	.byte	0x80, 0xcb, 0x48, 0x46 ; \
-+	.byte	0x84, 0xcb, 0xc8, 0x66 ; \
-+	.byte	0xba, 0xcb, 0x76, 0x59 ; \
-+	.byte	0xbe, 0xcb, 0xf6, 0x79 ; \
-+	.byte	0x8c, 0xcc, 0x44, 0x40 ; \
-+	.byte	0x88, 0xcc, 0xc4, 0x60 ; \
-+	ldd	[%o0 + 0xd0], %f0 ; \
-+	.byte	0xb2, 0xcc, 0x7e, 0x5d ; \
-+	ldd	[%o0 + 0xd8], %f2 ; \
-+	.byte	0xb6, 0xcc, 0xfe, 0x7d ; \
-+	.byte	0xa8, 0xcd, 0x48, 0xc6 ; \
-+	.byte	0xac, 0xcd, 0xc8, 0xe6 ; \
-+	ldd	[%o0 + 0xc0], %f4 ; \
-+	.byte	0xba, 0xcd, 0x76, 0xd9 ; \
-+	ldd	[%o0 + 0xc8], %f6 ; \
-+	.byte	0xbe, 0xcd, 0xf6, 0xf9
-+	!aes_dround01	%f0, %f20, %f22, %f24 ; \
-+	!aes_dround23	%f2, %f20, %f22, %f22 ; \
-+	!ldd	[%o0 + 0x80], %f20 ; \
-+	!aes_dround01	%f0, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f2, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f4, %f24, %f22, %f0 ; \
-+	!aes_dround23	%f6, %f24, %f22, %f2 ; \
-+	!ldd	[%o0 + 0x88], %f22 ; \
-+	!aes_dround01	%f4, %f56, %f58, %f60 ; \
-+	!ldd	[%o0 + 0x70], %f24 ; \
-+	!aes_dround23	%f6, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f8, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f10, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f8, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f10, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f12, %f6, %f4, %f0 ; \
-+	!aes_dround23	%f14, %f6, %f4, %f2 ; \
-+	!aes_dround01	%f12, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f14, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f16, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f18, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f16, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f18, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f20, %f6, %f4, %f0 ; \
-+	!aes_dround23	%f22, %f6, %f4, %f2 ; \
-+	!aes_dround01	%f20, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f22, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f24, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f26, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f24, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f26, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f28, %f6, %f4, %f0 ; \
-+	!aes_dround23	%f30, %f6, %f4, %f2 ; \
-+	!aes_dround01	%f28, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f30, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f32, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f34, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f32, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f34, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f36, %f6, %f4, %f0 ; \
-+	!aes_dround23	%f38, %f6, %f4, %f2 ; \
-+	!aes_dround01	%f36, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f38, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f40, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f42, %f0, %f2, %f4 ; \
-+	!aes_dround01	%f40, %f60, %f62, %f56 ; \
-+	!aes_dround23	%f42, %f60, %f62, %f58 ; \
-+	!aes_dround01	%f44, %f6, %f4, %f0 ; \
-+	!aes_dround23	%f46, %f6, %f4, %f2 ; \
-+	!aes_dround01	%f44, %f56, %f58, %f60 ; \
-+	!aes_dround23	%f46, %f56, %f58, %f62 ; \
-+	!aes_dround01	%f48, %f0, %f2, %f6 ; \
-+	!aes_dround23	%f50, %f0, %f2, %f4 ; \
-+	!ldd	[%o0 + 0xd0], %f0 ; \
-+	!aes_dround01	%f48, %f60, %f62, %f56 ; \
-+	!ldd	[%o0 + 0xd8], %f2 ; \
-+	!aes_dround23	%f50, %f60, %f62, %f58 ; \
-+	!aes_dround01_l	%f52, %f6, %f4, %f20 ; \
-+	!aes_dround23_l	%f54, %f6, %f4, %f22 ; \
-+	!ldd	[%o0 + 0xc0], %f4 ; \
-+	!aes_dround01_l	%f52, %f56, %f58, %f60 ; \
-+	!ldd	[%o0 + 0xc8], %f6 ; \
-+	!aes_dround23_l	%f54, %f56, %f58, %f62
-+
-+
-+	ENTRY(t4_aes128_load_keys_for_encrypt)
-+
-+	ldd	[%o0 + 0x10], %f16
-+	ldd	[%o0 + 0x18], %f18
-+	ldd	[%o0 + 0x20], %f20
-+	ldd	[%o0 + 0x28], %f22
-+	ldd	[%o0 + 0x30], %f24
-+	ldd	[%o0 + 0x38], %f26
-+	ldd	[%o0 + 0x40], %f28
-+	ldd	[%o0 + 0x48], %f30
-+	ldd	[%o0 + 0x50], %f32
-+	ldd	[%o0 + 0x58], %f34
-+	ldd	[%o0 + 0x60], %f36
-+	ldd	[%o0 + 0x68], %f38
-+	ldd	[%o0 + 0x70], %f40
-+	ldd	[%o0 + 0x78], %f42
-+	ldd	[%o0 + 0x80], %f44
-+	ldd	[%o0 + 0x88], %f46
-+	ldd	[%o0 + 0x90], %f48
-+	ldd	[%o0 + 0x98], %f50
-+	ldd	[%o0 + 0xa0], %f52
-+	retl
-+	ldd	[%o0 + 0xa8], %f54
-+
-+	SET_SIZE(t4_aes128_load_keys_for_encrypt)
-+
-+
-+	ENTRY(t4_aes192_load_keys_for_encrypt)
-+
-+	ldd	[%o0 + 0x10], %f8
-+	ldd	[%o0 + 0x18], %f10
-+	ldd	[%o0 + 0x20], %f12
-+	ldd	[%o0 + 0x28], %f14
-+	ldd	[%o0 + 0x30], %f16
-+	ldd	[%o0 + 0x38], %f18
-+	ldd	[%o0 + 0x40], %f20
-+	ldd	[%o0 + 0x48], %f22
-+	ldd	[%o0 + 0x50], %f24
-+	ldd	[%o0 + 0x58], %f26
-+	ldd	[%o0 + 0x60], %f28
-+	ldd	[%o0 + 0x68], %f30
-+	ldd	[%o0 + 0x70], %f32
-+	ldd	[%o0 + 0x78], %f34
-+	ldd	[%o0 + 0x80], %f36
-+	ldd	[%o0 + 0x88], %f38
-+	ldd	[%o0 + 0x90], %f40
-+	ldd	[%o0 + 0x98], %f42
-+	ldd	[%o0 + 0xa0], %f44
-+	ldd	[%o0 + 0xa8], %f46
-+	ldd	[%o0 + 0xb0], %f48
-+	ldd	[%o0 + 0xb8], %f50
-+	ldd	[%o0 + 0xc0], %f52
-+	retl
-+	ldd	[%o0 + 0xc8], %f54
-+
-+	SET_SIZE(t4_aes192_load_keys_for_encrypt)
-+
-+
-+	ENTRY(t4_aes256_load_keys_for_encrypt)
-+
-+	ldd	[%o0 + 0x10], %f0
-+	ldd	[%o0 + 0x18], %f2
-+	ldd	[%o0 + 0x20], %f4
-+	ldd	[%o0 + 0x28], %f6
-+	ldd	[%o0 + 0x30], %f8
-+	ldd	[%o0 + 0x38], %f10
-+	ldd	[%o0 + 0x40], %f12
-+	ldd	[%o0 + 0x48], %f14
-+	ldd	[%o0 + 0x50], %f16
-+	ldd	[%o0 + 0x58], %f18
-+	ldd	[%o0 + 0x60], %f20
-+	ldd	[%o0 + 0x68], %f22
-+	ldd	[%o0 + 0x70], %f24
-+	ldd	[%o0 + 0x78], %f26
-+	ldd	[%o0 + 0x80], %f28
-+	ldd	[%o0 + 0x88], %f30
-+	ldd	[%o0 + 0x90], %f32
-+	ldd	[%o0 + 0x98], %f34
-+	ldd	[%o0 + 0xa0], %f36
-+	ldd	[%o0 + 0xa8], %f38
-+	ldd	[%o0 + 0xb0], %f40
-+	ldd	[%o0 + 0xb8], %f42
-+	ldd	[%o0 + 0xc0], %f44
-+	ldd	[%o0 + 0xc8], %f46
-+	ldd	[%o0 + 0xd0], %f48
-+	ldd	[%o0 + 0xd8], %f50
-+	ldd	[%o0 + 0xe0], %f52
-+	retl
-+	ldd	[%o0 + 0xe8], %f54
-+
-+	SET_SIZE(t4_aes256_load_keys_for_encrypt)
-+
-+
-+#define	TEST_PARALLEL_ECB_ENCRYPT
-+#ifdef	TEST_PARALLEL_ECB_ENCRYPT
-+	ENTRY(t4_aes128_ecb_encrypt)
-+
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+	and	%o3, 16, %o4
-+	brz	%o4, ecbenc128_loop
-+	nop
-+
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f60
-+	movxtod	%g4, %f62
-+
-+	TEN_EROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	ecbenc128_loop_end
-+	add	%o2, 16, %o2
-+
-+ecbenc128_loop:
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f0
-+	movxtod	%g4, %f2
-+	ldx	[%o1 + 16], %g3	!input
-+	ldx	[%o1 + 24], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f60
-+	movxtod	%g4, %f62
-+
-+	TEN_EROUNDS_2
-+
-+	std	%f0, [%o2]
-+	std	%f2, [%o2 + 8]
-+
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	ecbenc128_loop
-+	add	%o2, 32, %o2
-+ecbenc128_loop_end:
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes128_ecb_encrypt)
-+
-+
-+	ENTRY(t4_aes192_ecb_encrypt)
-+
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+	and	%o3, 16, %o4
-+	brz	%o4, ecbenc192_loop
-+	nop
-+
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f60
-+	movxtod	%g4, %f62
-+
-+	TWELVE_EROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	ecbenc192_loop_end
-+	add	%o2, 16, %o2
-+
-+ecbenc192_loop:
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f0
-+	movxtod	%g4, %f2
-+	ldx	[%o1 + 16], %g3	!input
-+	ldx	[%o1 + 24], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f60
-+	movxtod	%g4, %f62
-+
-+	TWELVE_EROUNDS_2
-+
-+	std	%f0, [%o2]
-+	std	%f2, [%o2 + 8]
-+
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	ecbenc192_loop
-+	add	%o2, 32, %o2
-+ecbenc192_loop_end:
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes192_ecb_encrypt)
-+
-+
-+	ENTRY(t4_aes256_ecb_encrypt)
-+
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+	and	%o3, 16, %o4
-+	brz	%o4, ecbenc256_loop
-+	nop
-+
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f60
-+	movxtod	%g4, %f62
-+
-+	FOURTEEN_EROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	ecbenc256_loop_end
-+	add	%o2, 16, %o2
-+
-+ecbenc256_loop:
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f20
-+	movxtod	%g4, %f22
-+	ldx	[%o1 + 16], %g3	!input
-+	ldx	[%o1 + 24], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f60
-+	movxtod	%g4, %f62
-+
-+	FOURTEEN_EROUNDS_2
-+
-+	std	%f20, [%o2]
-+	std	%f22, [%o2 + 8]
-+
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	ecbenc256_loop
-+	add	%o2, 32, %o2
-+
-+	ldd	[%o0 + 0x60], %f20
-+	ldd	[%o0 + 0x68], %f22
-+
-+ecbenc256_loop_end:
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes256_ecb_encrypt)
-+
-+#else
-+
-+	ENTRY(t4_aes128_ecb_encrypt)
-+
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+ecbenc128_loop:
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f60
-+	movxtod	%g4, %f62
-+
-+	TEN_EROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	ecbenc128_loop
-+	add	%o2, 16, %o2
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes128_ecb_encrypt)
-+
-+
-+	ENTRY(t4_aes192_ecb_encrypt)
-+
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+ecbenc192_loop:
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f60
-+	movxtod	%g4, %f62
-+
-+	TWELVE_EROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	ecbenc192_loop
-+	add	%o2, 16, %o2
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes192_ecb_encrypt)
-+
-+
-+	ENTRY(t4_aes256_ecb_encrypt)
-+
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+ecbenc256_loop:
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f60
-+	movxtod	%g4, %f62
-+
-+	FOURTEEN_EROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	ecbenc256_loop
-+	add	%o2, 16, %o2
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes256_ecb_encrypt)
-+#endif
-+
-+
-+	ENTRY(t4_aes128_cbc_encrypt)
-+
-+	ldd	[%o4], %f60	! IV
-+	ldd	[%o4 +8], %f62	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+cbcenc128_loop:
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f56
-+	movxtod	%g4, %f58
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	TEN_EROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	cbcenc128_loop
-+	add	%o2, 16, %o2
-+
-+	std	%f60, [%o4]
-+	retl
-+	std	%f62, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes128_cbc_encrypt)
-+
-+
-+	ENTRY(t4_aes192_cbc_encrypt)
-+
-+	ldd	[%o4], %f60	! IV
-+	ldd	[%o4 + 8], %f62	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+cbcenc192_loop:
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f56
-+	movxtod	%g4, %f58
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	TWELVE_EROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	cbcenc192_loop
-+	add	%o2, 16, %o2
-+
-+	std	%f60, [%o4]
-+	retl
-+	std	%f62, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes192_cbc_encrypt)
-+
-+
-+	ENTRY(t4_aes256_cbc_encrypt)
-+
-+	ldd	[%o4], %f60	! IV
-+	ldd	[%o4 + 8], %f62	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+cbcenc256_loop:
-+	ldx	[%o1], %g3	!input
-+	ldx	[%o1 + 8], %g4	!input
-+	xor	%g1, %g3, %g3	!input ^ ks[0-1]
-+	xor	%g2, %g4, %g4	!input ^ ks[0-1]
-+	movxtod	%g3, %f56
-+	movxtod	%g4, %f58
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	FOURTEEN_EROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	cbcenc256_loop
-+	add	%o2, 16, %o2
-+
-+	std	%f60, [%o4]
-+	retl
-+	std	%f62, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes256_cbc_encrypt)
-+
-+
-+#define	 TEST_PARALLEL_CTR_CRYPT
-+#ifdef	TEST_PARALLEL_CTR_CRYPT
-+	ENTRY(t4_aes128_ctr_crypt)
-+
-+	ldx	[%o4], %g3	! IV
-+	ldx	[%o4 +8], %g4	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+	and	%o3, 16, %g5
-+	brz, %g5, ctr128_loop
-+
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f60
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f62
-+	inc	%g4
-+
-+	TEN_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	ctr128_loop_end
-+	add	%o2, 16, %o2
-+
-+ctr128_loop:
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f0
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f2
-+	inc	%g4
-+
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f60
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f62
-+	inc	%g4
-+
-+	TEN_EROUNDS_2
-+
-+	ldd	[%o1], %f6		!input
-+	ldd	[%o1 + 8], %f4		!input
-+	ldd	[%o1 + 16], %f56	!input
-+	ldd	[%o1 + 24], %f58	!input
-+	fxor	%f0, %f6, %f0
-+	fxor	%f2, %f4, %f2
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+	std	%f0, [%o2]
-+	std	%f2, [%o2 + 8]
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	ctr128_loop
-+	add	%o2, 32, %o2
-+
-+ctr128_loop_end:
-+	stx	%g3, [%o4]
-+	retl
-+	stx	%g4, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes128_ctr_crypt)
-+
-+
-+	ENTRY(t4_aes192_ctr_crypt)
-+
-+	ldx	[%o4], %g3	! IV
-+	ldx	[%o4 +8], %g4	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+	and	%o3, 16, %g5
-+	brz, %g5, ctr192_loop
-+
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f60
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f62
-+	inc	%g4
-+
-+	TWELVE_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	ctr192_loop_end
-+	add	%o2, 16, %o2
-+
-+ctr192_loop:
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f0
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f2
-+	inc	%g4
-+
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f60
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f62
-+	inc	%g4
-+
-+	TWELVE_EROUNDS_2
-+
-+	ldd	[%o1], %f6		!input
-+	ldd	[%o1 + 8], %f4		!input
-+	ldd	[%o1 + 16], %f56	!input
-+	ldd	[%o1 + 24], %f58	!input
-+	fxor	%f0, %f6, %f0
-+	fxor	%f2, %f4, %f2
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+	std	%f0, [%o2]
-+	std	%f2, [%o2 + 8]
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	ctr192_loop
-+	add	%o2, 32, %o2
-+
-+ctr192_loop_end:
-+	stx	%g3, [%o4]
-+	retl
-+	stx	%g4, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes192_ctr_crypt)
-+
-+
-+	ENTRY(t4_aes256_ctr_crypt)
-+
-+	ldx	[%o4], %g3	! IV
-+	ldx	[%o4 +8], %g4	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+	and	%o3, 16, %g5
-+	brz,	%g5, ctr256_loop
-+
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f60
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f62
-+	inc	%g4
-+
-+	FOURTEEN_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	ctr256_loop_end
-+	add	%o2, 16, %o2
-+
-+ctr256_loop:
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f20
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f22
-+	inc	%g4
-+
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f60
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f62
-+	inc	%g4
-+
-+	FOURTEEN_EROUNDS_2
-+
-+	ldd	[%o1], %f56		!input
-+	ldd	[%o1 + 8], %f58		!input
-+	fxor	%f20, %f56, %f20
-+	fxor	%f22, %f58, %f22
-+	ldd	[%o1 + 16], %f56	!input
-+	ldd	[%o1 + 24], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+	std	%f20, [%o2]
-+	std	%f22, [%o2 + 8]
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	ctr256_loop
-+	add	%o2, 32, %o2
-+
-+	ldd	[%o0 + 0x60], %f20
-+	ldd	[%o0 + 0x68], %f22
-+
-+ctr256_loop_end:
-+	stx	%g3, [%o4]
-+	retl
-+	stx	%g4, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes256_ctr_crypt)
-+
-+#else
-+
-+	ENTRY(t4_aes128_ctr_crypt)
-+
-+	ldx	[%o4], %g3	! IV
-+	ldx	[%o4 +8], %g4	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+ctr128_loop:
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f60
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f62
-+	inc	%g4
-+
-+	TEN_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	ctr128_loop
-+	add	%o2, 16, %o2
-+
-+	stx	%g3, [%o4]
-+	retl
-+	stx	%g4, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes128_ctr_crypt)
-+
-+	ENTRY(t4_aes192_ctr_crypt)
-+
-+	ldx	[%o4], %g3	! IV
-+	ldx	[%o4 +8], %g4	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+ctr192_loop:
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f60
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f62
-+	inc	%g4
-+
-+	TWELVE_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	ctr192_loop
-+	add	%o2, 16, %o2
-+
-+	stx	%g3, [%o4]
-+	retl
-+	stx	%g4, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes192_ctr_crypt)
-+
-+
-+	ENTRY(t4_aes256_ctr_crypt)
-+
-+	ldx	[%o4], %g3	! IV
-+	ldx	[%o4 +8], %g4	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+ctr256_loop:
-+	xor	%g1, %g3, %g5
-+	movxtod	%g5, %f60
-+	xor	%g2, %g4, %g5
-+	movxtod	%g5, %f62
-+	inc	%g4
-+
-+	FOURTEEN_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	ctr256_loop
-+	add	%o2, 16, %o2
-+
-+	stx	%g3, [%o4]
-+	retl
-+	stx	%g4, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes256_ctr_crypt)
-+
-+#endif
-+
-+	ENTRY(t4_aes128_cfb128_encrypt)
-+
-+	ldd	[%o4], %f60	! IV
-+	ldd	[%o4 +8], %f62	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+cfb128_128_loop:
-+	movxtod	%g1, %f56
-+	movxtod	%g2, %f58
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	TEN_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	cfb128_128_loop
-+	add	%o2, 16, %o2
-+
-+	std	%f60, [%o4]
-+	retl
-+	std	%f62, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes128_cfb128_encrypt)
-+
-+
-+	ENTRY(t4_aes192_cfb128_encrypt)
-+
-+	ldd	[%o4], %f60	! IV
-+	ldd	[%o4 +8], %f62	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+cfb128_192_loop:
-+	movxtod	%g1, %f56
-+	movxtod	%g2, %f58
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	TWELVE_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	cfb128_192_loop
-+	add	%o2, 16, %o2
-+
-+	std	%f60, [%o4]
-+	retl
-+	std	%f62, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes192_cfb128_encrypt)
-+
-+
-+	ENTRY(t4_aes256_cfb128_encrypt)
-+
-+	ldd	[%o4], %f60	! IV
-+	ldd	[%o4 +8], %f62	! IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+cfb128_256_loop:
-+	movxtod	%g1, %f56
-+	movxtod	%g2, %f58
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	FOURTEEN_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	cfb128_256_loop
-+	add	%o2, 16, %o2
-+
-+	std	%f60, [%o4]
-+	retl
-+	std	%f62, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes256_cfb128_encrypt)
-+
-+
-+	ENTRY(t4_aes128_load_keys_for_decrypt)
-+
-+	ldd	[%o0], %f52
-+	ldd	[%o0 + 0x8], %f54
-+	ldd	[%o0 + 0x10], %f48
-+	ldd	[%o0 + 0x18], %f50
-+	ldd	[%o0 + 0x20], %f44
-+	ldd	[%o0 + 0x28], %f46
-+	ldd	[%o0 + 0x30], %f40
-+	ldd	[%o0 + 0x38], %f42
-+	ldd	[%o0 + 0x40], %f36
-+	ldd	[%o0 + 0x48], %f38
-+	ldd	[%o0 + 0x50], %f32
-+	ldd	[%o0 + 0x58], %f34
-+	ldd	[%o0 + 0x60], %f28
-+	ldd	[%o0 + 0x68], %f30
-+	ldd	[%o0 + 0x70], %f24
-+	ldd	[%o0 + 0x78], %f26
-+	ldd	[%o0 + 0x80], %f20
-+	ldd	[%o0 + 0x88], %f22
-+	ldd	[%o0 + 0x90], %f16
-+	retl
-+	ldd	[%o0 + 0x98], %f18
-+
-+	SET_SIZE(t4_aes128_load_keys_for_decrypt)
-+
-+
-+	ENTRY(t4_aes192_load_keys_for_decrypt)
-+
-+	ldd	[%o0], %f52
-+	ldd	[%o0 + 0x8], %f54
-+	ldd	[%o0 + 0x10], %f48
-+	ldd	[%o0 + 0x18], %f50
-+	ldd	[%o0 + 0x20], %f44
-+	ldd	[%o0 + 0x28], %f46
-+	ldd	[%o0 + 0x30], %f40
-+	ldd	[%o0 + 0x38], %f42
-+	ldd	[%o0 + 0x40], %f36
-+	ldd	[%o0 + 0x48], %f38
-+	ldd	[%o0 + 0x50], %f32
-+	ldd	[%o0 + 0x58], %f34
-+	ldd	[%o0 + 0x60], %f28
-+	ldd	[%o0 + 0x68], %f30
-+	ldd	[%o0 + 0x70], %f24
-+	ldd	[%o0 + 0x78], %f26
-+	ldd	[%o0 + 0x80], %f20
-+	ldd	[%o0 + 0x88], %f22
-+	ldd	[%o0 + 0x90], %f16
-+	ldd	[%o0 + 0x98], %f18
-+	ldd	[%o0 + 0xa0], %f12
-+	ldd	[%o0 + 0xa8], %f14
-+	ldd	[%o0 + 0xb0], %f8
-+	retl
-+	ldd	[%o0 + 0xb8], %f10
-+
-+	SET_SIZE(t4_aes192_load_keys_for_decrypt)
-+
-+
-+	ENTRY(t4_aes256_load_keys_for_decrypt)
-+
-+
-+	ldd	[%o0], %f52
-+	ldd	[%o0 + 0x8], %f54
-+	ldd	[%o0 + 0x10], %f48
-+	ldd	[%o0 + 0x18], %f50
-+	ldd	[%o0 + 0x20], %f44
-+	ldd	[%o0 + 0x28], %f46
-+	ldd	[%o0 + 0x30], %f40
-+	ldd	[%o0 + 0x38], %f42
-+	ldd	[%o0 + 0x40], %f36
-+	ldd	[%o0 + 0x48], %f38
-+	ldd	[%o0 + 0x50], %f32
-+	ldd	[%o0 + 0x58], %f34
-+	ldd	[%o0 + 0x60], %f28
-+	ldd	[%o0 + 0x68], %f30
-+	ldd	[%o0 + 0x70], %f24
-+	ldd	[%o0 + 0x78], %f26
-+	ldd	[%o0 + 0x80], %f20
-+	ldd	[%o0 + 0x88], %f22
-+	ldd	[%o0 + 0x90], %f16
-+	ldd	[%o0 + 0x98], %f18
-+	ldd	[%o0 + 0xa0], %f12
-+	ldd	[%o0 + 0xa8], %f14
-+	ldd	[%o0 + 0xb0], %f8
-+	ldd	[%o0 + 0xb8], %f10
-+	ldd	[%o0 + 0xc0], %f4
-+	ldd	[%o0 + 0xc8], %f6
-+	ldd	[%o0 + 0xd0], %f0
-+	retl
-+	ldd	[%o0 + 0xd8], %f2
-+
-+	SET_SIZE(t4_aes256_load_keys_for_decrypt)
-+
-+
-+#define	 TEST_PARALLEL_ECB_DECRYPT
-+#ifdef	TEST_PARALLEL_ECB_DECRYPT
-+	ENTRY(t4_aes128_ecb_decrypt)
-+
-+	ldx	[%o0 + 0xa0], %g1	!ks[last-1]
-+	ldx	[%o0 + 0xa8], %g2	!ks[last]
-+	and	%o3, 16, %o4
-+	brz	%o4, ecbdec128_loop
-+	nop
-+
-+	ldx	[%o1], %o4
-+	ldx	[%o1 + 8], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f60
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f62
-+
-+	TEN_DROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 0x8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	ecbdec128_loop_end
-+	add	%o2, 16, %o2
-+
-+ecbdec128_loop:
-+	ldx	[%o1], %o4
-+	ldx	[%o1 + 8], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f0
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f2
-+	ldx	[%o1 + 16], %o4
-+	ldx	[%o1 + 24], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f60
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f62
-+
-+	TEN_DROUNDS_2
-+
-+	std	%f0, [%o2]
-+	std	%f2, [%o2 + 8]
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	ecbdec128_loop
-+	add	%o2, 32, %o2
-+ecbdec128_loop_end:
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes128_ecb_decrypt)
-+
-+	ENTRY(t4_aes192_ecb_decrypt)
-+
-+	ldx	[%o0 + 0xc0], %g1	!ks[last-1]
-+	ldx	[%o0 + 0xc8], %g2	!ks[last]
-+	and	%o3, 16, %o4
-+	brz	%o4, ecbdec192_loop
-+	nop
-+
-+	ldx	[%o1], %o4
-+	ldx	[%o1 + 8], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f60
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f62
-+
-+	TWELVE_DROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 0x8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	ecbdec192_loop_end
-+	add	%o2, 16, %o2
-+
-+ecbdec192_loop:
-+	ldx	[%o1], %o4
-+	ldx	[%o1 + 8], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f0
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f2
-+	ldx	[%o1 + 16], %o4
-+	ldx	[%o1 + 24], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f60
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f62
-+
-+	TWELVE_DROUNDS_2
-+
-+	std	%f0, [%o2]
-+	std	%f2, [%o2 + 8]
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	ecbdec192_loop
-+	add	%o2, 32, %o2
-+ecbdec192_loop_end:
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes192_ecb_decrypt)
-+
-+
-+	ENTRY(t4_aes256_ecb_decrypt)
-+
-+	ldx	[%o0 + 0xe0], %g1	!ks[last-1]
-+	ldx	[%o0 + 0xe8], %g2	!ks[last]
-+	and	%o3, 16, %o4
-+	brz	%o4, ecbdec256_loop
-+	nop
-+
-+	ldx	[%o1], %o4
-+	ldx	[%o1 + 8], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f60
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f62
-+
-+	FOURTEEN_DROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 0x8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	ecbdec256_loop_end
-+	add	%o2, 16, %o2
-+
-+ecbdec256_loop:
-+	ldx	[%o1], %o4
-+	ldx	[%o1 + 8], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f20
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f22
-+	ldx	[%o1 + 16], %o4
-+	ldx	[%o1 + 24], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f60
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f62
-+
-+	FOURTEEN_DROUNDS_2
-+
-+	std	%f20, [%o2]
-+	std	%f22, [%o2 + 8]
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	ecbdec256_loop
-+	add	%o2, 32, %o2
-+
-+	ldd	[%o0 + 0x80], %f20
-+	ldd	[%o0 + 0x88], %f22
-+
-+ecbdec256_loop_end:
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes256_ecb_decrypt)
-+
-+#else
-+
-+	ENTRY(t4_aes128_ecb_decrypt)
-+
-+	ldx	[%o0 + 0xa0], %g1	!ks[last-1]
-+	ldx	[%o0 + 0xa8], %g2	!ks[last]
-+
-+ecbdec128_loop:
-+	ldx	[%o1], %o4
-+	ldx	[%o1 + 8], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f60
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f62
-+
-+	TEN_DROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 0x8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	ecbdec128_loop
-+	add	%o2, 16, %o2
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes128_ecb_decrypt)
-+
-+
-+	ENTRY(t4_aes192_ecb_decrypt)
-+
-+	ldx	[%o0 + 0xc0], %g1	!ks[last-1]
-+	ldx	[%o0 + 0xc8], %g2	!ks[last]
-+
-+ecbdec192_loop:
-+	ldx	[%o1], %o4
-+	ldx	[%o1 + 8], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f60
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f62
-+
-+	TWELVE_DROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 0x8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	ecbdec192_loop
-+	add	%o2, 16, %o2
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes192_ecb_decrypt)
-+
-+
-+	ENTRY(t4_aes256_ecb_decrypt)
-+
-+	ldx	[%o0 + 0xe0], %g1	!ks[last-1]
-+	ldx	[%o0 + 0xe8], %g2	!ks[last]
-+
-+ecbdec256_loop:
-+	ldx	[%o1], %o4
-+	ldx	[%o1 + 8], %o5
-+	xor	%g1, %o4, %g3	!initial ARK
-+	movxtod	%g3, %f60
-+	xor	%g2, %o5, %g3	!initial ARK
-+	movxtod	%g3, %f62
-+
-+	FOURTEEN_DROUNDS
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 0x8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	ecbdec256_loop
-+	add	%o2, 16, %o2
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_aes256_ecb_decrypt)
-+
-+#endif
-+
-+#define	TEST_PARALLEL_CBC_DECRYPT
-+#ifdef	EST_PARALLEL_CBC_DECRYPT
-+		ENTRY(t4_aes128_cbc_decrypt)
-+
-+	save	%sp, -SA(MINFRAME), %sp
-+	ldx	[%i4], %o0		!IV
-+	ldx	[%i4 + 8], %o1		!IV
-+	ldx	[%i0 + 0xa0], %o2	!ks[last-1]
-+	ldx	[%i0 + 0xa8], %o3	!ks[last]
-+	and	%i3, 16, %o4
-+	brz	%o4, cbcdec128_loop
-+	nop
-+
-+	ldx	[%i1], %o4
-+	ldx	[%i1 + 8], %o5
-+	xor	%o2, %o4, %g1	!initial ARK
-+	movxtod	%g1, %f60
-+	xor	%o3, %o5, %g1	!initial ARK
-+	movxtod	%g1, %f62
-+
-+	TEN_DROUNDS
-+
-+	movxtod	%o0, %f56
-+	movxtod	%o1, %f58
-+	mov	%o4, %o0	!save last block as next IV
-+	mov	%o5, %o1
-+	fxor	%f56, %f60, %f60	!add in previous IV
-+	fxor	%f58, %f62, %f62
-+
-+	std	%f60, [%i2]
-+	std	%f62, [%i2 + 0x8]
-+
-+	add	%i1, 16, %i1
-+	subcc	%i3, 16, %i3
-+	be	cbcdec128_loop_end
-+	add	%i2, 16, %i2
-+
-+
-+cbcdec128_loop:
-+	ldx	[%i1], %g4
-+	ldx	[%i1 + 8], %g5
-+	xor	%o2, %g4, %g1	!initial ARK
-+	movxtod	%g1, %f0
-+	xor	%o3, %g5, %g1	!initial ARK
-+	movxtod	%g1, %f2
-+
-+	ldx	[%i1 + 16], %o4
-+	ldx	[%i1 + 24], %o5
-+	xor	%o2, %o4, %g1	!initial ARK
-+	movxtod	%g1, %f60
-+	xor	%o3, %o5, %g1	!initial ARK
-+	movxtod	%g1, %f62
-+
-+	TEN_DROUNDS_2
-+
-+	movxtod	%o0, %f6
-+	movxtod	%o1, %f4
-+	fxor	%f6, %f0, %f0	!add in previous IV
-+	fxor	%f4, %f2, %f2
-+
-+	std	%f0, [%i2]
-+	std	%f2, [%i2 + 8]
-+
-+	movxtod	%g4, %f56
-+	movxtod	%g5, %f58
-+	mov	%o4, %o0	!save last block as next IV
-+	mov	%o5, %o1
-+	fxor	%f56, %f60, %f60	!add in previous IV
-+	fxor	%f58, %f62, %f62
-+
-+	std	%f60, [%i2 + 16]
-+	std	%f62, [%i2 + 24]
-+
-+	add	%i1, 32, %i1
-+	subcc	%i3, 32, %i3
-+	bne	cbcdec128_loop
-+	add	%i2, 32, %i2
-+
-+cbcdec128_loop_end:
-+	stx	%o0, [%i4]
-+	stx	%o1, [%i4 + 8]
-+	ret
-+	restore
-+
-+	SET_SIZE(t4_aes128_cbc_decrypt)
-+
-+
-+	ENTRY(t4_aes192_cbc_decrypt)
-+
-+	save	%sp, -SA(MINFRAME), %sp
-+	ldx	[%i4], %o0		!IV
-+	ldx	[%i4 + 8], %o1		!IV
-+	ldx	[%i0 + 0xc0], %o2	!ks[last-1]
-+	ldx	[%i0 + 0xc8], %o3	!ks[last]
-+	and	%i3, 16, %o4
-+	brz	%o4, cbcdec192_loop
-+	nop
-+
-+	ldx	[%i1], %o4
-+	ldx	[%i1 + 8], %o5
-+	xor	%o2, %o4, %g1	!initial ARK
-+	movxtod	%g1, %f60
-+	xor	%o3, %o5, %g1	!initial ARK
-+	movxtod	%g1, %f62
-+
-+	TWELVE_DROUNDS
-+
-+	movxtod	%o0, %f56
-+	movxtod	%o1, %f58
-+	mov	%o4, %o0	!save last block as next IV
-+	mov	%o5, %o1
-+	fxor	%f56, %f60, %f60	!add in previous IV
-+	fxor	%f58, %f62, %f62
-+
-+	std	%f60, [%i2]
-+	std	%f62, [%i2 + 0x8]
-+
-+	add	%i1, 16, %i1
-+	subcc	%i3, 16, %i3
-+	be	cbcdec192_loop_end
-+	add	%i2, 16, %i2
-+
-+
-+cbcdec192_loop:
-+	ldx	[%i1], %g4
-+	ldx	[%i1 + 8], %g5
-+	xor	%o2, %g4, %g1	!initial ARK
-+	movxtod	%g1, %f0
-+	xor	%o3, %g5, %g1	!initial ARK
-+	movxtod	%g1, %f2
-+
-+	ldx	[%i1 + 16], %o4
-+	ldx	[%i1 + 24], %o5
-+	xor	%o2, %o4, %g1	!initial ARK
-+	movxtod	%g1, %f60
-+	xor	%o3, %o5, %g1	!initial ARK
-+	movxtod	%g1, %f62
-+
-+	TWELVE_DROUNDS_2
-+
-+	movxtod	%o0, %f6
-+	movxtod	%o1, %f4
-+	fxor	%f6, %f0, %f0	!add in previous IV
-+	fxor	%f4, %f2, %f2
-+
-+	std	%f0, [%i2]
-+	std	%f2, [%i2 + 8]
-+
-+	movxtod	%g4, %f56
-+	movxtod	%g5, %f58
-+	mov	%o4, %o0	!save last block as next IV
-+	mov	%o5, %o1
-+	fxor	%f56, %f60, %f60	!add in previous IV
-+	fxor	%f58, %f62, %f62
-+
-+	std	%f60, [%i2 + 16]
-+	std	%f62, [%i2 + 24]
-+
-+	add	%i1, 32, %i1
-+	subcc	%i3, 32, %i3
-+	bne	cbcdec192_loop
-+	add	%i2, 32, %i2
-+
-+cbcdec192_loop_end:
-+	stx	%o0, [%i4]
-+	stx	%o1, [%i4 + 8]
-+	ret
-+	restore
-+
-+	SET_SIZE(t4_aes192_cbc_decrypt)
-+
-+
-+	ENTRY(t4_aes256_cbc_decrypt)
-+
-+	save	%sp, -SA(MINFRAME), %sp
-+	mov	%i0, %o0		!FOURTEEN_DROUNDS uses %o0
-+	ldx	[%i4], %g2		!IV
-+	ldx	[%i4 + 8], %o1		!IV
-+	ldx	[%o0 + 0xe0], %o2	!ks[last-1]
-+	ldx	[%o0 + 0xe8], %o3	!ks[last]
-+	and	%i3, 16, %o4
-+	brz	%o4, cbcdec256_loop
-+	nop
-+
-+	ldx	[%i1], %o4
-+	ldx	[%i1 + 8], %o5
-+	xor	%o2, %o4, %g1	!initial ARK
-+	movxtod	%g1, %f60
-+	xor	%o3, %o5, %g1	!initial ARK
-+	movxtod	%g1, %f62
-+
-+	FOURTEEN_DROUNDS
-+
-+	movxtod	%g2, %f56
-+	movxtod	%o1, %f58
-+	mov	%o4, %g2	!save last block as next IV
-+	mov	%o5, %o1
-+	fxor	%f56, %f60, %f60	!add in previous IV
-+	fxor	%f58, %f62, %f62
-+
-+	std	%f60, [%i2]
-+	std	%f62, [%i2 + 0x8]
-+
-+	add	%i1, 16, %i1
-+	subcc	%i3, 16, %i3
-+	be	cbcdec256_loop_end
-+	add	%i2, 16, %i2
-+
-+
-+cbcdec256_loop:
-+	ldx	[%i1], %g4
-+	ldx	[%i1 + 8], %g5
-+	xor	%o2, %g4, %g1	!initial ARK
-+	movxtod	%g1, %f20
-+	xor	%o3, %g5, %g1	!initial ARK
-+	movxtod	%g1, %f22
-+
-+	ldx	[%i1 + 16], %o4
-+	ldx	[%i1 + 24], %o5
-+	xor	%o2, %o4, %g1	!initial ARK
-+	movxtod	%g1, %f60
-+	xor	%o3, %o5, %g1	!initial ARK
-+	movxtod	%g1, %f62
-+
-+	FOURTEEN_DROUNDS_2
-+
-+	movxtod	%g2, %f56
-+	movxtod	%o1, %f58
-+	fxor	%f56, %f20, %f20	!add in previous IV
-+	fxor	%f58, %f22, %f22
-+
-+	std	%f20, [%i2]
-+	std	%f22, [%i2 + 8]
-+
-+	movxtod	%g4, %f56
-+	movxtod	%g5, %f58
-+	mov	%o4, %g2	!save last block as next IV
-+	mov	%o5, %o1
-+	fxor	%f56, %f60, %f60	!add in previous IV
-+	fxor	%f58, %f62, %f62
-+
-+	std	%f60, [%i2 + 16]
-+	std	%f62, [%i2 + 24]
-+
-+	add	%i1, 32, %i1
-+	subcc	%i3, 32, %i3
-+	bne	cbcdec256_loop
-+	add	%i2, 32, %i2
-+
-+	ldd	[%o0 + 0x80], %f20
-+	ldd	[%o0 + 0x88], %f22
-+
-+cbcdec256_loop_end:
-+	stx	%g2, [%i4]
-+	stx	%o1, [%i4 + 8]
-+	ret
-+	restore
-+
-+	SET_SIZE(t4_aes256_cbc_decrypt)
-+
-+#else
-+
-+	ENTRY(t4_aes128_cbc_decrypt)
-+
-+	save	%sp, -SA(MINFRAME), %sp
-+	ldx	[%i4], %o0		!IV
-+	ldx	[%i4 + 8], %o1		!IV
-+	ldx	[%i0 + 0xa0], %o2	!ks[last-1]
-+	ldx	[%i0 + 0xa8], %o3	!ks[last]
-+
-+cbcdec128_loop:
-+	ldx	[%i1], %o4
-+	ldx	[%i1 + 8], %o5
-+	xor	%o2, %o4, %g1	!initial ARK
-+	movxtod	%g1, %f60
-+	xor	%o3, %o5, %g1	!initial ARK
-+	movxtod	%g1, %f62
-+
-+	TEN_DROUNDS
-+
-+	movxtod	%o0, %f56
-+	movxtod	%o1, %f58
-+	mov	%o4, %o0	!save last block as next IV
-+	mov	%o5, %o1
-+	fxor	%f56, %f60, %f60	!add in previous IV
-+	fxor	%f58, %f62, %f62
-+
-+	std	%f60, [%i2]
-+	std	%f62, [%i2 + 0x8]
-+
-+	add	%i1, 16, %i1
-+	subcc	%i3, 16, %i3
-+	bne	cbcdec128_loop
-+	add	%i2, 16, %i2
-+
-+	stx	%o0, [%i4]
-+	stx	%o1, [%i4 + 8]
-+	ret
-+	restore
-+
-+	SET_SIZE(t4_aes128_cbc_decrypt)
-+
-+
-+	ENTRY(t4_aes192_cbc_decrypt)
-+
-+	save	%sp, -SA(MINFRAME), %sp
-+	ldx	[%i4], %o0		!IV
-+	ldx	[%i4 + 8], %o1		!IV
-+	ldx	[%i0 + 0xc0], %o2	!ks[last-1]
-+	ldx	[%i0 + 0xc8], %o3	!ks[last]
-+
-+cbcdec192_loop:
-+	ldx	[%i1], %o4
-+	ldx	[%i1 + 8], %o5
-+	xor	%o2, %o4, %g1	!initial ARK
-+	movxtod	%g1, %f60
-+	xor	%o3, %o5, %g1	!initial ARK
-+	movxtod	%g1, %f62
-+
-+	TWELVE_DROUNDS
-+
-+	movxtod	%o0, %f56
-+	movxtod	%o1, %f58
-+	mov	%o4, %o0	!save last block as next IV
-+	mov	%o5, %o1
-+	fxor	%f56, %f60, %f60	!add in previous IV
-+	fxor	%f58, %f62, %f62
-+
-+	std	%f60, [%i2]
-+	std	%f62, [%i2 + 0x8]
-+
-+	add	%i1, 16, %i1
-+	subcc	%i3, 16, %i3
-+	bne	cbcdec192_loop
-+	add	%i2, 16, %i2
-+
-+	stx	%o0, [%i4]
-+	stx	%o1, [%i4 + 8]
-+	ret
-+	restore
-+
-+	SET_SIZE(t4_aes192_cbc_decrypt)
-+
-+
-+	ENTRY(t4_aes256_cbc_decrypt)
-+
-+	save	%sp, -SA(MINFRAME), %sp
-+	ldx	[%i4], %o0		!IV
-+	ldx	[%i4 + 8], %o1		!IV
-+	ldx	[%i0 + 0xe0], %o2	!ks[last-1]
-+	ldx	[%i0 + 0xe8], %o3	!ks[last]
-+
-+cbcdec256_loop:
-+	ldx	[%i1], %o4
-+	ldx	[%i1 + 8], %o5
-+	xor	%o2, %o4, %g1	!initial ARK
-+	movxtod	%g1, %f60
-+	xor	%o3, %o5, %g1	!initial ARK
-+	movxtod	%g1, %f62
-+
-+	FOURTEEN_DROUNDS
-+
-+	movxtod	%o0, %f56
-+	movxtod	%o1, %f58
-+	mov	%o4, %o0	!save last block as next IV
-+	mov	%o5, %o1
-+	fxor	%f56, %f60, %f60	!add in previous IV
-+	fxor	%f58, %f62, %f62
-+
-+	std	%f60, [%i2]
-+	std	%f62, [%i2 + 0x8]
-+
-+	add	%i1, 16, %i1
-+	subcc	%i3, 16, %i3
-+	bne	cbcdec256_loop
-+	add	%i2, 16, %i2
-+
-+	stx	%o0, [%i4]
-+	stx	%o1, [%i4 + 8]
-+	ret
-+	restore
-+
-+	SET_SIZE(t4_aes256_cbc_decrypt)
-+
-+#endif
-+
-+#define	TEST_PARALLEL_CFB128_DECRYPT
-+#ifdef	TEST_PARALLEL_CFB128_DECRYPT
-+
-+	ENTRY(t4_aes128_cfb128_decrypt)
-+
-+	ldd	[%o4], %f56	!IV
-+	ldd	[%o4 + 8], %f58	!IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+	and	%o3, 16, %o5
-+	brz	%o5, cfb128dec_128_loop
-+
-+	movxtod	%g1, %f60
-+	movxtod	%g2, %f62
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	/* CFB mode uses encryption for the decrypt operation */
-+	TEN_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	cfb128dec_128_loop_end
-+	add	%o2, 16, %o2
-+
-+cfb128dec_128_loop:
-+	ldd	[%o1], %f6	!input
-+	ldd	[%o1 + 8], %f4	!input
-+	movxtod	%g1, %f60
-+	movxtod	%g2, %f62
-+	fxor	%f60, %f6, %f0
-+	fxor	%f62, %f4, %f2
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	/* CFB mode uses encryption for the decrypt operation */
-+	TEN_EROUNDS_2
-+
-+	ldd	[%o1], %f6	!input
-+	ldd	[%o1 + 8], %f4	!input
-+	ldd	[%o1 + 16], %f56	!input
-+	ldd	[%o1 + 24], %f58	!input
-+
-+	fxor	%f60, %f6, %f6
-+	fxor	%f62, %f4, %f4
-+	fxor	%f0, %f56, %f60
-+	fxor	%f2, %f58, %f62
-+
-+	std	%f6, [%o2]
-+	std	%f4, [%o2 + 8]
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	cfb128dec_128_loop
-+	add	%o2, 32, %o2
-+
-+cfb128dec_128_loop_end:
-+	std	%f56, [%o4]
-+	retl
-+	std	%f58, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes128_cfb128_decrypt)
-+
-+
-+	ENTRY(t4_aes192_cfb128_decrypt)
-+
-+	ldd	[%o4], %f56	!IV
-+	ldd	[%o4 + 8], %f58	!IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+	and	%o3, 16, %o5
-+	brz	%o5, cfb128dec_192_loop
-+
-+	movxtod	%g1, %f60
-+	movxtod	%g2, %f62
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	/* CFB mode uses encryption for the decrypt operation */
-+	TWELVE_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	cfb128dec_192_loop_end
-+	add	%o2, 16, %o2
-+
-+cfb128dec_192_loop:
-+	ldd	[%o1], %f6	!input
-+	ldd	[%o1 + 8], %f4	!input
-+	movxtod	%g1, %f60
-+	movxtod	%g2, %f62
-+	fxor	%f60, %f6, %f0
-+	fxor	%f62, %f4, %f2
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	/* CFB mode uses encryption for the decrypt operation */
-+	TWELVE_EROUNDS_2
-+
-+	ldd	[%o1], %f6	!input
-+	ldd	[%o1 + 8], %f4	!input
-+	ldd	[%o1 + 16], %f56	!input
-+	ldd	[%o1 + 24], %f58	!input
-+
-+	fxor	%f60, %f6, %f6
-+	fxor	%f62, %f4, %f4
-+	fxor	%f0, %f56, %f60
-+	fxor	%f2, %f58, %f62
-+
-+	std	%f6, [%o2]
-+	std	%f4, [%o2 + 8]
-+	std	%f60, [%o2 + 16]
-+	std	%f62, [%o2 + 24]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	cfb128dec_192_loop
-+	add	%o2, 32, %o2
-+
-+cfb128dec_192_loop_end:
-+	std	%f56, [%o4]
-+	retl
-+	std	%f58, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes192_cfb128_decrypt)
-+
-+
-+	ENTRY(t4_aes256_cfb128_decrypt)
-+
-+	ldd	[%o4], %f56	!IV
-+	ldd	[%o4 + 8], %f58	!IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+	and	%o3, 16, %o5
-+	brz	%o5, cfb128dec_256_loop
-+
-+	movxtod	%g1, %f60
-+	movxtod	%g2, %f62
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	/* CFB mode uses encryption for the decrypt operation */
-+	FOURTEEN_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	be	cfb128dec_256_loop_end
-+	add	%o2, 16, %o2
-+
-+cfb128dec_256_loop:
-+	ldd	[%o1], %f20	!input
-+	ldd	[%o1 + 8], %f22	!input
-+	movxtod	%g1, %f60
-+	movxtod	%g2, %f62
-+	fxor	%f60, %f20, %f20
-+	fxor	%f62, %f22, %f22
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	/* CFB mode uses encryption for the decrypt operation */
-+	FOURTEEN_EROUNDS_2
-+
-+	ldd	[%o1 + 16], %f56	!input
-+	ldd	[%o1 + 24], %f58	!input
-+	fxor	%f20, %f56, %f20
-+	fxor	%f22, %f58, %f22
-+	std	%f20, [%o2 + 16]
-+	std	%f22, [%o2 + 24]
-+
-+	ldd	[%o1], %f20	!input
-+	ldd	[%o1 + 8], %f22	!input
-+
-+	fxor	%f60, %f20, %f20
-+	fxor	%f62, %f22, %f22
-+
-+	std	%f20, [%o2]
-+	std	%f22, [%o2 + 8]
-+
-+	add	%o1, 32, %o1
-+	subcc	%o3, 32, %o3
-+	bne	cfb128dec_256_loop
-+	add	%o2, 32, %o2
-+
-+	ldd	[%o0 + 0x60], %f20
-+	ldd	[%o0 + 0x68], %f22
-+
-+cfb128dec_256_loop_end:
-+	std	%f56, [%o4]
-+	retl
-+	std	%f58, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes256_cfb128_decrypt)
-+
-+#else
-+	ENTRY(t4_aes128_cfb128_decrypt)
-+
-+	ldd	[%o4], %f56	!IV
-+	ldd	[%o4 + 8], %f58	!IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+cfb128dec_128_loop:
-+	movxtod	%g1, %f60
-+	movxtod	%g2, %f62
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	/* CFB mode uses encryption for the decrypt operation */
-+	TEN_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	cfb128dec_128_loop
-+	add	%o2, 16, %o2
-+
-+	std	%f56, [%o4]
-+	retl
-+	std	%f58, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes128_cfb128_decrypt)
-+
-+
-+	ENTRY(t4_aes192_cfb128_decrypt)
-+
-+	ldd	[%o4], %f56	!IV
-+	ldd	[%o4 + 8], %f58	!IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+cfb128dec_192_loop:
-+	movxtod	%g1, %f60
-+	movxtod	%g2, %f62
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	/* CFB mode uses encryption for the decrypt operation */
-+	TWELVE_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	cfb128dec_192_loop
-+	add	%o2, 16, %o2
-+
-+	std	%f56, [%o4]
-+	retl
-+	std	%f58, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes192_cfb128_decrypt)
-+
-+
-+	ENTRY(t4_aes256_cfb128_decrypt)
-+
-+	ldd	[%o4], %f56	!IV
-+	ldd	[%o4 + 8], %f58	!IV
-+	ldx	[%o0], %g1	! ks[0]
-+	ldx	[%o0 + 8], %g2	! ks[1]
-+
-+cfb128dec_256_loop:
-+	movxtod	%g1, %f60
-+	movxtod	%g2, %f62
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	/* CFB mode uses encryption for the decrypt operation */
-+	FOURTEEN_EROUNDS
-+
-+	ldd	[%o1], %f56	!input
-+	ldd	[%o1 + 8], %f58	!input
-+	fxor	%f60, %f56, %f60
-+	fxor	%f62, %f58, %f62
-+
-+	std	%f60, [%o2]
-+	std	%f62, [%o2 + 8]
-+
-+	add	%o1, 16, %o1
-+	subcc	%o3, 16, %o3
-+	bne	cfb128dec_256_loop
-+	add	%o2, 16, %o2
-+
-+	std	%f56, [%o4]
-+	retl
-+	std	%f58, [%o4 + 8]
-+
-+	SET_SIZE(t4_aes256_cfb128_decrypt)
-+
-+#endif
-+
-+#endif	/* lint || __lint */
-diff -uNr openssl-1.0.1c.orig/crypto/des/Makefile openssl-1.0.1c/crypto/des/Makefile
---- openssl-1.0.1c.orig/crypto/des/Makefile	2011-08-14 15:46:47.000000000 +0200
-+++ openssl-1.0.1c/crypto/des/Makefile	2012-05-11 22:19:17.483690000 +0200
-@@ -17,6 +17,10 @@
- ASFLAGS= $(INCLUDES) $(ASFLAG)
- AFLAGS= $(ASFLAGS)
- 
-+BITS:=	$(shell if grep '^CFLAG.*=.*-m64' ../../Makefile >/dev/null; \
-+		then echo 64; else echo 32; fi)
-+ASFLAGSYF= -xregsym=no -K pic -P -xarch=v9v -D_sparcv9 -D_ASM -Dsparc -m$(BITS)
-+
- GENERAL=Makefile
- TEST=destest.c
- APPS=
-@@ -62,6 +66,10 @@
- des_enc-sparc.S:	asm/des_enc.m4
- 	m4 -B 8192 asm/des_enc.m4 > des_enc-sparc.S
- 
-+t4_des.o: asm/t4_des.S
-+	as $(ASFLAGSYF) -o $@ asm/t4_des.S
-+	elfedit -e 'cap:hw1 -and -cmp vis vis3' $@
-+
- des-586.s:	asm/des-586.pl ../perlasm/x86asm.pl ../perlasm/cbc.pl
- 	$(PERL) asm/des-586.pl $(PERLASM_SCHEME) $(CFLAGS) > $@
- crypt586.s:	asm/crypt586.pl ../perlasm/x86asm.pl ../perlasm/cbc.pl
-diff -uNr openssl-1.0.1c.orig/crypto/des/asm/t4_des.S openssl-1.0.1c/crypto/des/asm/t4_des.S
---- openssl-1.0.1c.orig/crypto/des/asm/t4_des.S	1970-01-01 01:00:00.000000000 +0100
-+++ openssl-1.0.1c/crypto/des/asm/t4_des.S	2012-05-11 21:34:39.694955000 +0200
-@@ -0,0 +1,786 @@
-+/*
-+ * ====================================================================
-+ * Copyright (c) 1998-2011 The OpenSSL Project.  All rights reserved.
-+ *
-+ * Redistribution and use in source and binary forms, with or without
-+ * modification, are permitted provided that the following conditions
-+ * are met:
-+ *
-+ * 1. Redistributions of source code must retain the above copyright
-+ *    notice, this list of conditions and the following disclaimer.
-+ *
-+ * 2. Redistributions in binary form must reproduce the above copyright
-+ *    notice, this list of conditions and the following disclaimer in
-+ *    the documentation and/or other materials provided with the
-+ *    distribution.
-+ *
-+ * 3. All advertising materials mentioning features or use of this
-+ *    software must display the following acknowledgment:
-+ *    "This product includes software developed by the OpenSSL Project
-+ *    for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
-+ *
-+ * 4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to
-+ *    endorse or promote products derived from this software without
-+ *    prior written permission. For written permission, please contact
-+ *    openssl-core@openssl.org.
-+ *
-+ * 5. Products derived from this software may not be called "OpenSSL"
-+ *    nor may "OpenSSL" appear in their names without prior written
-+ *    permission of the OpenSSL Project.
-+ *
-+ * 6. Redistributions of any form whatsoever must retain the following
-+ *    acknowledgment:
-+ *    "This product includes software developed by the OpenSSL Project
-+ *    for use in the OpenSSL Toolkit (http://www.openssl.org/)"
-+ *
-+ * THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY
-+ * EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-+ * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE OpenSSL PROJECT OR
-+ * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
-+ * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
-+ * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-+ * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-+ * STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
-+ * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
-+ * OF THE POSSIBILITY OF SUCH DAMAGE.
-+ * ====================================================================
-+ */
-+
-+/*
-+ * Copyright (c) 2011, Oracle and/or its affiliates. All rights reserved.
-+ */
-+
-+/*LINTLIBRARY*/
-+
-+#if defined(lint) || defined(__lint)
-+
-+#include <sys/types.h>
-+#include "../engine/eng_t4_des_asm.h"
-+
-+/*ARGSUSED*/
-+void t4_des_expand(uint64_t *rk, const uint32_t *key)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_des_encrypt(const uint64_t *rk, const uint64_t *pt, uint64_t *ct)
-+{ return; }
-+
-+
-+/*ARGSUSED*/
-+void t4_des_load_keys(uint64_t *ks)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_des_ecb_crypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t *asm_out, size_t amount_to_crypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_des_cbc_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t *asm_out, size_t amount_to_crypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_des_cbc_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t *asm_out, size_t amount_to_crypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_des3_load_keys(uint64_t *ks)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_des3_ecb_crypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t *asm_out, size_t amount_to_crypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_des3_cbc_encrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t *asm_out, size_t amount_to_crypt, uint64_t *iv)
-+{ return; }
-+
-+/*ARGSUSED*/
-+void t4_des3_cbc_decrypt(uint64_t *ks, uint64_t *asm_in,
-+    uint64_t *asm_out, size_t amount_to_crypt, uint64_t *iv)
-+{ return; }
-+
-+#else	/* lint || __lint */
-+
-+#include <sys/asm_linkage.h>
-+
-+
-+	ENTRY(t4_des_expand)
-+
-+!load key
-+	ld	[%o1], %f0
-+	ld	[%o1 + 0x4], %f1
-+
-+!expand the key
-+	!des_kexpand %f0, 0, %f0
-+	!des_kexpand %f0, 1, %f2
-+	!des_kexpand %f2, 3, %f6
-+	!des_kexpand %f2, 2, %f4
-+	!des_kexpand %f6, 3, %f10
-+	!des_kexpand %f6, 2, %f8
-+	!des_kexpand %f10, 3, %f14
-+	!des_kexpand %f10, 2, %f12
-+	!des_kexpand %f14, 1, %f16
-+	!des_kexpand %f16, 3, %f20
-+	!des_kexpand %f16, 2, %f18
-+	!des_kexpand %f20, 3, %f24
-+	!des_kexpand %f20, 2, %f22
-+	!des_kexpand %f24, 3, %f28
-+	!des_kexpand %f24, 2, %f26
-+	!des_kexpand %f28, 1, %f30
-+	.byte	0x81, 0xb0, 0x26, 0xc0
-+	.byte	0x85, 0xb0, 0x26, 0xc1
-+	.byte	0x8d, 0xb0, 0xa6, 0xc3
-+	.byte	0x89, 0xb0, 0xa6, 0xc2
-+	.byte	0x95, 0xb1, 0xa6, 0xc3
-+	.byte	0x91, 0xb1, 0xa6, 0xc2
-+	.byte	0x9d, 0xb2, 0xa6, 0xc3
-+	.byte	0x99, 0xb2, 0xa6, 0xc2
-+	.byte	0xa1, 0xb3, 0xa6, 0xc1
-+	.byte	0xa9, 0xb4, 0x26, 0xc3
-+	.byte	0xa5, 0xb4, 0x26, 0xc2
-+	.byte	0xb1, 0xb5, 0x26, 0xc3
-+	.byte	0xad, 0xb5, 0x26, 0xc2
-+	.byte	0xb9, 0xb6, 0x26, 0xc3
-+	.byte	0xb5, 0xb6, 0x26, 0xc2
-+	.byte	0xbd, 0xb7, 0x26, 0xc1
-+
-+!copy expanded key back into array
-+	std	%f0, [%o0]
-+	std	%f2, [%o0 + 0x8]
-+	std	%f4, [%o0 + 0x10]
-+	std	%f6, [%o0 + 0x18]
-+	std	%f8, [%o0 + 0x20]
-+	std	%f10, [%o0 + 0x28]
-+	std	%f12, [%o0 + 0x30]
-+	std	%f14, [%o0 + 0x38]
-+	std	%f16, [%o0 + 0x40]
-+	std	%f18, [%o0 + 0x48]
-+	std	%f20, [%o0 + 0x50]
-+	std	%f22, [%o0 + 0x58]
-+	std	%f24, [%o0 + 0x60]
-+	std	%f26, [%o0 + 0x68]
-+	std	%f28, [%o0 + 0x70]
-+	retl
-+	std	%f30, [%o0 + 0x78]
-+
-+	SET_SIZE(t4_des_expand)
-+
-+
-+	ENTRY(t4_des_encrypt)
-+
-+!load expanded key
-+	ldd	[%o0], %f0
-+	ldd	[%o0 + 0x8], %f2
-+	ldd	[%o0 + 0x10], %f4
-+	ldd	[%o0 + 0x18], %f6
-+	ldd	[%o0 + 0x20], %f8
-+	ldd	[%o0 + 0x28], %f10
-+	ldd	[%o0 + 0x30], %f12
-+	ldd	[%o0 + 0x38], %f14
-+	ldd	[%o0 + 0x40], %f16
-+	ldd	[%o0 + 0x48], %f18
-+	ldd	[%o0 + 0x50], %f20
-+	ldd	[%o0 + 0x58], %f22
-+	ldd	[%o0 + 0x60], %f24
-+	ldd	[%o0 + 0x68], %f26
-+	ldd	[%o0 + 0x70], %f28
-+	ldd	[%o0 + 0x78], %f30
-+
-+!load input
-+	ldd	[%o1], %f32
-+
-+!perform the cipher transformation
-+	!des_ip	%f32, %f32
-+	!des_round %f0,  %f2,  %f32, %f32
-+	!des_round %f4,  %f6,  %f32, %f32
-+	!des_round %f8,  %f10, %f32, %f32
-+	!des_round %f12, %f14, %f32, %f32
-+	!des_round %f16, %f18, %f32, %f32
-+	!des_round %f20, %f22, %f32, %f32
-+	!des_round %f24, %f26, %f32, %f32
-+	!des_round %f28, %f30, %f32, %f32
-+	!des_iip	%f32, %f32
-+	.byte	0x83, 0xb0, 0x66, 0x80
-+	.byte	0x82, 0xc8, 0x03, 0x22
-+	.byte	0x82, 0xc9, 0x03, 0x26
-+	.byte	0x82, 0xca, 0x03, 0x2a
-+	.byte	0x82, 0xcb, 0x03, 0x2e
-+	.byte	0x82, 0xcc, 0x03, 0x32
-+	.byte	0x82, 0xcd, 0x03, 0x36
-+	.byte	0x82, 0xce, 0x03, 0x3a
-+	.byte	0x82, 0xcf, 0x03, 0x3e
-+	.byte	0x83, 0xb0, 0x66, 0xa0
-+
-+!copy output back to array
-+	retl
-+	std	%f32, [%o2]
-+
-+	SET_SIZE(t4_des_encrypt)
-+
-+	ENTRY(t4_des_load_keys)
-+
-+!load expanded key
-+	ldd	[%o0], %f0
-+	ldd	[%o0 + 0x8], %f2
-+	ldd	[%o0 + 0x10], %f4
-+	ldd	[%o0 + 0x18], %f6
-+	ldd	[%o0 + 0x20], %f8
-+	ldd	[%o0 + 0x28], %f10
-+	ldd	[%o0 + 0x30], %f12
-+	ldd	[%o0 + 0x38], %f14
-+	ldd	[%o0 + 0x40], %f16
-+	ldd	[%o0 + 0x48], %f18
-+	ldd	[%o0 + 0x50], %f20
-+	ldd	[%o0 + 0x58], %f22
-+	ldd	[%o0 + 0x60], %f24
-+	ldd	[%o0 + 0x68], %f26
-+	ldd	[%o0 + 0x70], %f28
-+	retl
-+	ldd	[%o0 + 0x78], %f30
-+
-+	SET_SIZE(t4_des_load_keys)
-+
-+	ENTRY(t4_des3_load_keys)
-+
-+!load first 30 pieces of the expanded key
-+	ldd	[%o0], %f0
-+	ldd	[%o0 + 0x8], %f2
-+	ldd	[%o0 + 0x10], %f4
-+	ldd	[%o0 + 0x18], %f6
-+	ldd	[%o0 + 0x20], %f8
-+	ldd	[%o0 + 0x28], %f10
-+	ldd	[%o0 + 0x30], %f12
-+	ldd	[%o0 + 0x38], %f14
-+	ldd	[%o0 + 0x40], %f16
-+	ldd	[%o0 + 0x48], %f18
-+	ldd	[%o0 + 0x50], %f20
-+	ldd	[%o0 + 0x58], %f22
-+	ldd	[%o0 + 0x60], %f24
-+	ldd	[%o0 + 0x68], %f26
-+	ldd	[%o0 + 0x70], %f28
-+	ldd	[%o0 + 0x78], %f30
-+	ldd	[%o0 + 0x80], %f32
-+	ldd	[%o0 + 0x88], %f34
-+	ldd	[%o0 + 0x90], %f36
-+	ldd	[%o0 + 0x98], %f38
-+	ldd	[%o0 + 0xa0], %f40
-+	ldd	[%o0 + 0xa8], %f42
-+	ldd	[%o0 + 0xb0], %f44
-+	ldd	[%o0 + 0xb8], %f46
-+	ldd	[%o0 + 0xc0], %f48
-+	ldd	[%o0 + 0xc8], %f50
-+	ldd	[%o0 + 0xd0], %f52
-+	ldd	[%o0 + 0xd8], %f54
-+	ldd	[%o0 + 0xe0], %f56
-+	retl
-+	ldd	[%o0 + 0xe8], %f58
-+
-+	SET_SIZE(t4_des3_load_keys)
-+
-+	ENTRY(t4_des_ecb_crypt)
-+
-+des_ecb_loop:
-+!load input
-+	ldd	[%o1], %f62
-+
-+!perform the cipher transformation
-+	!des_ip	%f62, %f62
-+	!des_round %f0,  %f2,  %f62, %f62
-+	!des_round %f4,  %f6,  %f62, %f62
-+	!des_round %f8,  %f10, %f62, %f62
-+	!des_round %f12, %f14, %f62, %f62
-+	!des_round %f16, %f18, %f62, %f62
-+	!des_round %f20, %f22, %f62, %f62
-+	!des_round %f24, %f26, %f62, %f62
-+	!des_round %f28, %f30, %f62, %f62
-+	!des_iip	%f62, %f62
-+	.byte	0xbf, 0xb7, 0xe6, 0x80
-+	.byte	0xbe, 0xc8, 0x3f, 0x22
-+	.byte	0xbe, 0xc9, 0x3f, 0x26
-+	.byte	0xbe, 0xca, 0x3f, 0x2a
-+	.byte	0xbe, 0xcb, 0x3f, 0x2e
-+	.byte	0xbe, 0xcc, 0x3f, 0x32
-+	.byte	0xbe, 0xcd, 0x3f, 0x36
-+	.byte	0xbe, 0xce, 0x3f, 0x3a
-+	.byte	0xbe, 0xcf, 0x3f, 0x3e
-+	.byte	0xbf, 0xb7, 0xe6, 0xa0
-+
-+!copy output back to array
-+	std	%f62, [%o2]
-+	sub	%o3, 8, %o3
-+	add	%o1, 8, %o1
-+	brnz	%o3, des_ecb_loop
-+	add	%o2, 8, %o2
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_des_ecb_crypt)
-+
-+
-+	ENTRY(t4_des_cbc_encrypt)
-+
-+	ldd	[%o4], %f60
-+des_cbc_encrypt_loop:
-+!load input
-+	ldd	[%o1], %f58
-+	fxor	%f58, %f60, %f62
-+
-+!perform the cipher transformation
-+	!des_ip	%f62, %f62
-+	!des_round %f0,  %f2,  %f62, %f62
-+	!des_round %f4,  %f6,  %f62, %f62
-+	!des_round %f8,  %f10, %f62, %f62
-+	!des_round %f12, %f14, %f62, %f62
-+	!des_round %f16, %f18, %f62, %f62
-+	!des_round %f20, %f22, %f62, %f62
-+	!des_round %f24, %f26, %f62, %f62
-+	!des_round %f28, %f30, %f62, %f62
-+	!des_iip	%f62, %f60
-+	.byte	0xbf, 0xb7, 0xe6, 0x80
-+	.byte	0xbe, 0xc8, 0x3f, 0x22
-+	.byte	0xbe, 0xc9, 0x3f, 0x26
-+	.byte	0xbe, 0xca, 0x3f, 0x2a
-+	.byte	0xbe, 0xcb, 0x3f, 0x2e
-+	.byte	0xbe, 0xcc, 0x3f, 0x32
-+	.byte	0xbe, 0xcd, 0x3f, 0x36
-+	.byte	0xbe, 0xce, 0x3f, 0x3a
-+	.byte	0xbe, 0xcf, 0x3f, 0x3e
-+	.byte	0xbb, 0xb7, 0xe6, 0xa0
-+
-+!copy output back to array
-+	std	%f60, [%o2]
-+	sub	%o3, 8, %o3
-+	add	%o1, 8, %o1
-+	brnz	%o3, des_cbc_encrypt_loop
-+	add	%o2, 8, %o2
-+
-+	retl
-+	std	%f60, [%o4]
-+
-+	SET_SIZE(t4_des_cbc_encrypt)
-+
-+
-+
-+	ENTRY(t4_des_cbc_decrypt)
-+
-+	ldd	[%o4], %f60
-+des_cbc_decrypt_loop:
-+!load input
-+	ldd	[%o1], %f62
-+	ldx	[%o1], %o5
-+
-+!perform the cipher transformation
-+	!des_ip	%f62, %f62
-+	!des_round %f0,  %f2,  %f62, %f62
-+	!des_round %f4,  %f6,  %f62, %f62
-+	!des_round %f8,  %f10, %f62, %f62
-+	!des_round %f12, %f14, %f62, %f62
-+	!des_round %f16, %f18, %f62, %f62
-+	!des_round %f20, %f22, %f62, %f62
-+	!des_round %f24, %f26, %f62, %f62
-+	!des_round %f28, %f30, %f62, %f62
-+	!des_iip	%f62, %f62
-+	.byte	0xbf, 0xb7, 0xe6, 0x80
-+	.byte	0xbe, 0xc8, 0x3f, 0x22
-+	.byte	0xbe, 0xc9, 0x3f, 0x26
-+	.byte	0xbe, 0xca, 0x3f, 0x2a
-+	.byte	0xbe, 0xcb, 0x3f, 0x2e
-+	.byte	0xbe, 0xcc, 0x3f, 0x32
-+	.byte	0xbe, 0xcd, 0x3f, 0x36
-+	.byte	0xbe, 0xce, 0x3f, 0x3a
-+	.byte	0xbe, 0xcf, 0x3f, 0x3e
-+	.byte	0xbf, 0xb7, 0xe6, 0xa0
-+	fxor	%f60, %f62, %f62
-+	movxtod	%o5, %f60
-+
-+!copy output back to array
-+	std	%f62, [%o2]
-+	sub	%o3, 8, %o3
-+	add	%o1, 8, %o1
-+	brnz	%o3, des_cbc_decrypt_loop
-+	add	%o2, 8, %o2
-+
-+	retl
-+	std	%f60, [%o4]
-+
-+	SET_SIZE(t4_des_cbc_decrypt)
-+
-+
-+
-+	ENTRY(t4_des3_ecb_crypt)
-+
-+des3_ecb_loop:
-+!load input
-+	ldd	[%o1], %f62
-+
-+!perform the cipher transformation
-+	!des_ip	%f62, %f62
-+	!des_round %f0,  %f2,  %f62, %f62
-+	!des_round %f4,  %f6,  %f62, %f62
-+	!des_round %f8,  %f10, %f62, %f62
-+	!des_round %f12, %f14, %f62, %f62
-+	!des_round %f16, %f18, %f62, %f62
-+	.byte	0xbf, 0xb7, 0xe6, 0x80
-+	.byte	0xbe, 0xc8, 0x3f, 0x22
-+	.byte	0xbe, 0xc9, 0x3f, 0x26
-+	.byte	0xbe, 0xca, 0x3f, 0x2a
-+	.byte	0xbe, 0xcb, 0x3f, 0x2e
-+	.byte	0xbe, 0xcc, 0x3f, 0x32
-+
-+	ldd	[%o0 + 0xf0], %f16
-+	ldd	[%o0 + 0xf8], %f18
-+	!des_round %f20, %f22, %f62, %f62
-+	.byte	0xbe, 0xcd, 0x3f, 0x36
-+	ldd	[%o0 + 0x100], %f20
-+	ldd	[%o0 + 0x108], %f22
-+	!des_round %f24, %f26, %f62, %f62
-+	.byte	0xbe, 0xce, 0x3f, 0x3a
-+	ldd	[%o0 + 0x110], %f24
-+	ldd	[%o0 + 0x118], %f26
-+	!des_round %f28, %f30, %f62, %f62
-+	.byte	0xbe, 0xcf, 0x3f, 0x3e
-+	ldd	[%o0 + 0x120], %f28
-+	ldd	[%o0 + 0x128], %f30
-+
-+	!des_iip	%f62, %f62
-+	!des_ip	%f62, %f62
-+	.byte	0xbf, 0xb7, 0xe6, 0xa0
-+	.byte	0xbf, 0xb7, 0xe6, 0x80
-+
-+	!des_round %f32, %f34, %f62, %f62
-+	.byte	0xbe, 0xc8, 0x7f, 0x23
-+	ldd	[%o0 + 0x130], %f0
-+	ldd	[%o0 + 0x138], %f2
-+	!des_round %f36, %f38,  %f62, %f62
-+	.byte	0xbe, 0xc9, 0x7f, 0x27
-+	ldd	[%o0 + 0x140], %f4
-+	ldd	[%o0 + 0x148], %f6
-+	!des_round %f40, %f42, %f62, %f62
-+	.byte	0xbe, 0xca, 0x7f, 0x2b
-+	ldd	[%o0 + 0x150], %f8
-+	ldd	[%o0 + 0x158], %f10
-+	!des_round %f44, %f46, %f62, %f62
-+	.byte	0xbe, 0xcb, 0x7f, 0x2f
-+	ldd	[%o0 + 0x160], %f12
-+	ldd	[%o0 + 0x168], %f14
-+	!des_round %f48, %f50, %f62, %f62
-+	!des_round %f52, %f54, %f62, %f62
-+	!des_round %f56, %f58, %f62, %f62
-+	!des_round %f16, %f18, %f62, %f62
-+	.byte	0xbe, 0xcc, 0x7f, 0x33
-+	.byte	0xbe, 0xcd, 0x7f, 0x37
-+	.byte	0xbe, 0xce, 0x7f, 0x3b
-+	.byte	0xbe, 0xcc, 0x3f, 0x32
-+	ldd	[%o0 + 0x170], %f16
-+	ldd	[%o0 + 0x178], %f18
-+
-+	!des_iip	%f62, %f62
-+	!des_ip	%f62, %f62
-+	.byte	0xbf, 0xb7, 0xe6, 0xa0
-+	.byte	0xbf, 0xb7, 0xe6, 0x80
-+
-+	!des_round %f20, %f22, %f62, %f62
-+	.byte	0xbe, 0xcd, 0x3f, 0x36
-+	ldd	[%o0 + 0x50], %f20
-+	ldd	[%o0 + 0x58], %f22
-+	!des_round %f24, %f26, %f62, %f62
-+	.byte	0xbe, 0xce, 0x3f, 0x3a
-+	ldd	[%o0 + 0x60], %f24
-+	ldd	[%o0 + 0x68], %f26
-+	!des_round %f28, %f30, %f62, %f62
-+	.byte	0xbe, 0xcf, 0x3f, 0x3e
-+	ldd	[%o0 + 0x70], %f28
-+	ldd	[%o0 + 0x78], %f30
-+	!des_round %f0,  %f2,  %f62, %f62
-+	.byte	0xbe, 0xc8, 0x3f, 0x22
-+	ldd	[%o0], %f0
-+	ldd	[%o0 + 0x8], %f2
-+	!des_round %f4,  %f6,  %f62, %f62
-+	.byte	0xbe, 0xc9, 0x3f, 0x26
-+
-+	ldd	[%o0 + 0x10], %f4
-+	ldd	[%o0 + 0x18], %f6
-+	!des_round %f8,  %f10, %f62, %f62
-+	.byte	0xbe, 0xca, 0x3f, 0x2a
-+	ldd	[%o0 + 0x20], %f8
-+	ldd	[%o0 + 0x28], %f10
-+	!des_round %f12, %f14, %f62, %f62
-+	.byte	0xbe, 0xcb, 0x3f, 0x2e
-+	ldd	[%o0 + 0x30], %f12
-+	ldd	[%o0 + 0x38], %f14
-+	!des_round %f16, %f18, %f62, %f62
-+	.byte	0xbe, 0xcc, 0x3f, 0x32
-+	ldd	[%o0 + 0x40], %f16
-+	ldd	[%o0 + 0x48], %f18
-+
-+	!des_iip	%f62, %f62
-+	.byte	0xbf, 0xb7, 0xe6, 0xa0
-+
-+!copy output back to array
-+	std	%f62, [%o2]
-+	sub	%o3, 8, %o3
-+	add	%o1, 8, %o1
-+	brnz	%o3, des3_ecb_loop
-+	add	%o2, 8, %o2
-+
-+	retl
-+	nop
-+
-+	SET_SIZE(t4_des3_ecb_crypt)
-+
-+
-+	ENTRY(t4_des3_cbc_encrypt)
-+
-+	ldd	[%o4], %f62
-+des3_cbc_encrypt_loop:
-+!load input
-+	ldd	[%o1], %f60
-+	fxor	%f60, %f62, %f62
-+
-+!perform the cipher transformation
-+	!des_ip	%f62, %f62
-+	.byte	0xbf, 0xb7, 0xe6, 0x80
-+	!des_round %f0,  %f2,  %f62, %f62
-+	!des_round %f4,  %f6,  %f62, %f62
-+	!des_round %f8,  %f10, %f62, %f62
-+	!des_round %f12, %f14, %f62, %f62
-+	!des_round %f16, %f18, %f62, %f62
-+	.byte	0xbe, 0xc8, 0x3f, 0x22
-+	.byte	0xbe, 0xc9, 0x3f, 0x26
-+	.byte	0xbe, 0xca, 0x3f, 0x2a
-+	.byte	0xbe, 0xcb, 0x3f, 0x2e
-+	.byte	0xbe, 0xcc, 0x3f, 0x32
-+	ldd	[%o0 + 0xf0], %f16
-+	ldd	[%o0 + 0xf8], %f18
-+	!des_round %f20, %f22, %f62, %f62
-+	.byte	0xbe, 0xcd, 0x3f, 0x36

@@ Diff output truncated at 100000 characters. @@