forked from pool/openssl-1_1
02427a3414
* All the minor versions of the 1.1.x openssl branch have the same sonum
  and keep ABI compatibility
- Remove bit obsolete syntax
- Use %license macro
- Don't disable afalgeng on aarch64
- Add support for s390x CPACF enhancements (fate#321518)
  patches taken from https://github.com/openssl/openssl/pull/2859:
  * 0002-crypto-modes-asm-ghash-s390x.pl-fix-gcm_gmult_4bit-K.patch
  * 0004-s390x-assembly-pack-add-OPENSSL_s390xcap-environment.patch
  * 0005-s390x-assembly-pack-add-OPENSSL_s390xcap-man-page.patch
  * 0006-s390x-assembly-pack-extended-s390x-capability-vector.patch
  * 0007-crypto-evp-e_aes.c-add-foundations-for-extended-s390.patch
  * 0008-s390x-assembly-pack-extended-s390x-capability-vector.patch
  * 0009-crypto-aes-asm-aes-s390x.pl-add-KMA-code-path.patch
  * 0010-doc-man3-OPENSSL_s390xcap.pod-update-KMA.patch
  * 0011-crypto-aes-asm-aes-s390x.pl-add-CFI-annotations-KMA-.patch
  * 0012-s390x-assembly-pack-add-KMA-code-path-for-aes-gcm.patch
  * 0013-crypto-aes-asm-aes-s390x.pl-add-CFI-annotations-KMA-.patch
- Do not filter pkgconfig() provides/requires.
- Obsolete openssl-1_0_0 by openssl-1_1_0: this is required for a clean
  upgrade path as an aid to zypp (boo#1070003).
- Update to 1.1.0g
  * OpenSSL Security Advisory [02 Nov 2017]

OBS-URL: https://build.opensuse.org/package/show/security:tls/openssl-1_1?expand=0&rev=2
From acef148f0aac18d78c3c857065b3a1274279b2df Mon Sep 17 00:00:00 2001
From: Patrick Steuer <patrick.steuer@de.ibm.com>
Date: Sat, 25 Feb 2017 10:05:12 +0100
Subject: [PATCH 12/44] s390x assembly pack: add KMA code path for aes-gcm.

Signed-off-by: Patrick Steuer <patrick.steuer@de.ibm.com>
---
 crypto/aes/asm/aes-s390x.pl |  52 ++++++++++++
 crypto/evp/e_aes.c          | 200 ++++++++++++++++++++++++++++++++++++++++++--
 crypto/modes/gcm128.c       |   4 +
 crypto/s390x_arch.h         |   5 ++
 4 files changed, 253 insertions(+), 8 deletions(-)

Index: openssl-1.1.0g/crypto/aes/asm/aes-s390x.pl
===================================================================
--- openssl-1.1.0g.orig/crypto/aes/asm/aes-s390x.pl	2018-01-09 17:35:12.231011406 +0100
+++ openssl-1.1.0g/crypto/aes/asm/aes-s390x.pl	2018-01-09 17:35:16.795082242 +0100
@@ -2257,6 +2257,58 @@ $code.=<<___;
 .size	AES_xts_decrypt,.-AES_xts_decrypt
 ___
 }
+
+################
+# void s390x_aes_gcm_blocks(unsigned char *out, GCM128_CONTEXT *ctx,
+#                           const unsigned char *in, size_t len,
+#                           const unsigned char *aad, size_t alen,
+#                           const AES_KEY *key, int enc)
+{
+my ($out,$ctx,$in,$len,$aad,$alen,$key,$enc) = map("%r$_",(2..9));
+$code.=<<___ if (!$softonly);
+.globl	s390x_aes_gcm_blocks
+.type	s390x_aes_gcm_blocks,\@function
+.align	16
+s390x_aes_gcm_blocks:
+	stm$g	$alen,$enc,7*$SIZE_T($sp)
+	lm$g	$alen,$enc,$stdframe($sp)
+
+	aghi	$sp,-112
+
+	lmg	%r0,%r1,0($ctx)
+	ahi	%r1,-1
+
+	mvc	16(32,$sp),64($ctx)	# copy Xi/H
+	#mvc	48(16,$sp),48($ctx)	# copy len
+	mvc	80(32,$sp),0($key)	# copy key
+	st	%r1,12($sp)		# copy Yi
+	stmg	%r0,%r1,64($sp)
+
+	lhi	%r1,128
+	l	%r0,240($key)	# kma capability vector checked by caller
+	sll	$enc,7
+	xr	$enc,%r1
+	or	%r0,$enc
+
+	la	%r1,0($sp)
+
+	.long	0xb9296024	# kma $out,$aad,$in
+	brc	1,.-4		# pay attention to "partial completion"
+
+	l	%r0,12($sp)
+	mvc	64(16,$ctx),16($sp)	# update Xi
+	xc	0(112,$sp),0($sp)	# wipe stack
+
+	la	$sp,112($sp)
+	ahi	%r0,1
+	st	%r0,12($ctx)
+
+	lm$g	$alen,$enc,7*$SIZE_T($sp)
+	br	$ra
+.size	s390x_aes_gcm_blocks,.-s390x_aes_gcm_blocks
+___
+}
+
 $code.=<<___;
 .string "AES for s390x, CRYPTOGAMS by <appro\@openssl.org>"
 ___
Index: openssl-1.1.0g/crypto/evp/e_aes.c
===================================================================
--- openssl-1.1.0g.orig/crypto/evp/e_aes.c	2018-01-09 17:35:12.199010909 +0100
+++ openssl-1.1.0g/crypto/evp/e_aes.c	2018-01-09 17:35:12.239011531 +0100
@@ -960,7 +960,7 @@ const EVP_CIPHER *EVP_aes_##keylen##_##m
  * If KM and KMC support the function code, AES_KEY structure holds
  * key/function code (instead of key schedule/number of rounds).
  */
-# define S390X_AES_FC (((AES_KEY *)(key))->rounds)
+# define S390X_AES_FC(key) (((AES_KEY *)(key))->rounds)
 
 # define S390X_aes_128_CAPABLE ((OPENSSL_s390xcap_P[5]&S390X_KM_AES_128)&&\
                                 (OPENSSL_s390xcap_P[7]&S390X_KMC_AES_128))
@@ -969,6 +969,11 @@ const EVP_CIPHER *EVP_aes_##keylen##_##m
 # define S390X_aes_256_CAPABLE ((OPENSSL_s390xcap_P[5]&S390X_KM_AES_256)&&\
                                 (OPENSSL_s390xcap_P[7]&S390X_KMC_AES_256))
 
+void s390x_aes_gcm_blocks(unsigned char *out, GCM128_CONTEXT *ctx,
+                          const unsigned char *in, size_t len,
+                          const unsigned char *aad, size_t alen,
+                          const AES_KEY *key, int enc);
+
 # define s390x_aes_init_key aes_init_key
 static int s390x_aes_init_key(EVP_CIPHER_CTX *ctx, const unsigned char *key,
                               const unsigned char *iv, int enc);
@@ -1029,18 +1034,197 @@ static int s390x_aes_cfb1_cipher(EVP_CIP
 static int s390x_aes_ctr_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
                                 const unsigned char *in, size_t len);
 
-# define S390X_aes_128_gcm_CAPABLE 0
-# define S390X_aes_192_gcm_CAPABLE 0
-# define S390X_aes_256_gcm_CAPABLE 0
+# define S390X_aes_128_gcm_CAPABLE (S390X_aes_128_CAPABLE&&\
+                                    OPENSSL_s390xcap_P[17]\
+                                    &S390X_KMA_GCM_AES_128)
+# define S390X_aes_192_gcm_CAPABLE (S390X_aes_192_CAPABLE&&\
+                                    OPENSSL_s390xcap_P[17]\
+                                    &S390X_KMA_GCM_AES_192)
+# define S390X_aes_256_gcm_CAPABLE (S390X_aes_256_CAPABLE&&\
+                                    OPENSSL_s390xcap_P[17]\
+                                    &S390X_KMA_GCM_AES_256)
+
+static int s390x_aes_gcm(GCM128_CONTEXT *ctx, const unsigned char *in,
+                         unsigned char *out, size_t len, int enc)
+{
+    int n;
+    size_t rem;
+    u64 mlen = ctx->len.u[1];
+    unsigned char tmp;
+
+    mlen += len;
+
+    if (mlen > ((1ULL << 36) - 32) || (sizeof(len) == 8 && mlen < len))
+        return -1;
+
+    ctx->len.u[1] = mlen;
+
+    if (ctx->ares) {
+        (*ctx->gmult)(ctx->Xi.u, ctx->Htable);
+        ctx->ares = 0;
+    }
+    S390X_AES_FC(ctx->key) |= S390X_KMA_LAAD;
+    n = ctx->mres;
+
+    if (n) {
+        while (n && len) {
+            tmp = *in;
+            *out = tmp ^ ctx->EKi.c[n];
+            ctx->Xi.c[n] ^= enc ? *out : tmp;
+            n = (n + 1) % AES_BLOCK_SIZE;
+            --len;
+            ++in;
+            ++out;
+        }
+        if (n == 0) {
+            (*ctx->gmult)(ctx->Xi.u, ctx->Htable);
+        } else {
+            ctx->mres = n;
+            return 0;
+        }
+    }
+    rem = len % AES_BLOCK_SIZE;
+    len -= rem;
+
+    s390x_aes_gcm_blocks(out, ctx, in, len, NULL, 0, ctx->key, enc);
+
+    if (rem) {
+        in += len;
+        out += len;
+        (*ctx->block)(ctx->Yi.c, ctx->EKi.c, ctx->key);
+        ++ctx->Yi.d[3];
+        while (rem--) {
+            tmp = in[n];
+            out[n] = tmp ^ ctx->EKi.c[n];
+            ctx->Xi.c[n] ^= enc ? out[n] : tmp;
+            ++n;
+        }
+    }
+
+    ctx->mres = n;
+    return 0;
+}
 
-# define s390x_aes_gcm_init_key aes_gcm_init_key
 static int s390x_aes_gcm_init_key(EVP_CIPHER_CTX *ctx,
                                   const unsigned char *key,
-                                  const unsigned char *iv, int enc);
+                                  const unsigned char *iv, int enc)
+{
+    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
+    const int keybitlen = EVP_CIPHER_CTX_key_length(ctx) * 8;
+
+    if (!iv && !key)
+        return 1;
+
+    if (key) {
+        AES_set_encrypt_key(key, keybitlen, &gctx->ks.ks);
+        CRYPTO_gcm128_init(&gctx->gcm, &gctx->ks, (block128_f)AES_encrypt);
+        S390X_AES_FC(&gctx->ks) |= S390X_KMA_HS;
+
+        if (iv == NULL && gctx->iv_set)
+            iv = gctx->iv;
+
+        if (iv) {
+            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
+            gctx->iv_set = 1;
+        }
+        gctx->key_set = 1;
+    } else {
+        if (gctx->key_set)
+            CRYPTO_gcm128_setiv(&gctx->gcm, iv, gctx->ivlen);
+        else
+            memcpy(gctx->iv, iv, gctx->ivlen);
+
+        gctx->iv_set = 1;
+        gctx->iv_gen = 0;
+    }
+    return 1;
+}
+
+static int s390x_aes_gcm_tls_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
+                                    const unsigned char *in, size_t len)
+{
+    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
+    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
+    int enc = EVP_CIPHER_CTX_encrypting(ctx);
+    int rv = -1;
+
+    if (out != in || len < (EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN))
+        return -1;
+
+    if (EVP_CIPHER_CTX_ctrl(ctx, enc ? EVP_CTRL_GCM_IV_GEN :
+                            EVP_CTRL_GCM_SET_IV_INV,
+                            EVP_GCM_TLS_EXPLICIT_IV_LEN, out) <= 0)
+        goto err;
+
+    if (CRYPTO_gcm128_aad(&gctx->gcm, buf, gctx->tls_aad_len))
+        goto err;
+
+    in += EVP_GCM_TLS_EXPLICIT_IV_LEN;
+    out += EVP_GCM_TLS_EXPLICIT_IV_LEN;
+    len -= EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
+
+    if (s390x_aes_gcm(&gctx->gcm, in, out, len, enc))
+        goto err;
+
+    if (enc) {
+        out += len;
+        CRYPTO_gcm128_tag(&gctx->gcm, out, EVP_GCM_TLS_TAG_LEN);
+        rv = len + EVP_GCM_TLS_EXPLICIT_IV_LEN + EVP_GCM_TLS_TAG_LEN;
+    } else {
+        CRYPTO_gcm128_tag(&gctx->gcm, buf, EVP_GCM_TLS_TAG_LEN);
+
+        if (CRYPTO_memcmp(buf, in + len, EVP_GCM_TLS_TAG_LEN)) {
+            OPENSSL_cleanse(out, len);
+            goto err;
+        }
+        rv = len;
+    }
+ err:
+    gctx->iv_set = 0;
+    gctx->tls_aad_len = -1;
+    return rv;
+}
 
-# define s390x_aes_gcm_cipher aes_gcm_cipher
 static int s390x_aes_gcm_cipher(EVP_CIPHER_CTX *ctx, unsigned char *out,
-                                const unsigned char *in, size_t len);
+                                const unsigned char *in, size_t len)
+{
+    EVP_AES_GCM_CTX *gctx = EVP_C_DATA(EVP_AES_GCM_CTX,ctx);
+    unsigned char *buf = EVP_CIPHER_CTX_buf_noconst(ctx);
+    int enc = EVP_CIPHER_CTX_encrypting(ctx);
+
+    if (!gctx->key_set)
+        return -1;
+
+    if (gctx->tls_aad_len >= 0)
+        return s390x_aes_gcm_tls_cipher(ctx, out, in, len);
+
+    if (!gctx->iv_set)
+        return -1;
+
+    if (in) {
+        if (out == NULL) {
+            if (CRYPTO_gcm128_aad(&gctx->gcm, in, len))
+                return -1;
+        } else {
+            if (s390x_aes_gcm(&gctx->gcm, in, out, len, enc))
+                return -1;
+        }
+        return len;
+    } else {
+        if (enc) {
+            gctx->taglen = 16;
+            CRYPTO_gcm128_tag(&gctx->gcm, buf, gctx->taglen);
+        } else {
+            if (gctx->taglen < 0)
+                return -1;
+
+            if (CRYPTO_gcm128_finish(&gctx->gcm, buf, gctx->taglen))
+                return -1;
+        }
+        gctx->iv_set = 0;
+        return 0;
+    }
+}
 
 # define S390X_aes_128_xts_CAPABLE 1 /* checked by callee */
 # define S390X_aes_256_xts_CAPABLE 1
Index: openssl-1.1.0g/crypto/modes/gcm128.c
===================================================================
--- openssl-1.1.0g.orig/crypto/modes/gcm128.c	2017-11-02 15:29:03.000000000 +0100
+++ openssl-1.1.0g/crypto/modes/gcm128.c	2018-01-09 17:35:12.239011531 +0100
@@ -817,6 +817,10 @@ void CRYPTO_gcm128_init(GCM128_CONTEXT *
         ctx->gmult = gcm_gmult_4bit;
         CTX__GHASH(gcm_ghash_4bit);
     }
+# elif defined(GHASH_ASM)
+    gcm_init_4bit(ctx->Htable, ctx->H.u);
+    ctx->gmult = gcm_gmult_4bit;
+    CTX__GHASH(gcm_ghash_4bit);
 # else
     gcm_init_4bit(ctx->Htable, ctx->H.u);
 # endif
Index: openssl-1.1.0g/crypto/s390x_arch.h
===================================================================
--- openssl-1.1.0g.orig/crypto/s390x_arch.h	2018-01-09 17:35:12.207011034 +0100
+++ openssl-1.1.0g/crypto/s390x_arch.h	2018-01-09 17:35:12.239011531 +0100
@@ -45,4 +45,9 @@ extern uint64_t OPENSSL_s390xcap_P[];
 # define S390X_KMA_GCM_AES_192	(1ULL << 44)
 # define S390X_KMA_GCM_AES_128	(1ULL << 45)
 
+/* %r0 flags */
+# define S390X_KMA_LPC	(1ULL << 8)
+# define S390X_KMA_LAAD	(1ULL << 9)
+# define S390X_KMA_HS	(1ULL << 10)
+
 #endif