summary refs log tree commit diff
path: root/secure/lib
diff options
context:
space:
mode:
authorJung-uk Kim <jkim@FreeBSD.org>2020-03-18 02:13:12 +0000
committerJung-uk Kim <jkim@FreeBSD.org>2020-03-18 02:13:12 +0000
commit17f01e9963948a18f55eb97173123702c5dae671 (patch)
treebc68d611f898931c657418447120d2c674c1ff38 /secure/lib
parent889d304bb46d7551805fd8e79815a50a4cddda6b (diff)
parentaa144ced5d61b5c7fb74acaebb37d85bd08f0416 (diff)
downloadsrc-test-17f01e9963948a18f55eb97173123702c5dae671.tar.gz
src-test-17f01e9963948a18f55eb97173123702c5dae671.zip
Merge OpenSSL 1.1.1e.
Notes
Notes: svn path=/head/; revision=359060
Diffstat (limited to 'secure/lib')
-rw-r--r--secure/lib/libcrypto/Makefile.inc4
-rw-r--r--secure/lib/libcrypto/aarch64/ecp_nistz256-armv8.S71
-rw-r--r--secure/lib/libcrypto/aarch64/sha256-armv8.S2
-rw-r--r--secure/lib/libcrypto/aarch64/sha512-armv8.S2
-rw-r--r--secure/lib/libcrypto/amd64/aesni-gcm-x86_64.S784
-rw-r--r--secure/lib/libcrypto/amd64/aesni-mb-x86_64.S965
-rw-r--r--secure/lib/libcrypto/amd64/aesni-sha1-x86_64.S1354
-rw-r--r--secure/lib/libcrypto/amd64/aesni-sha256-x86_64.S4354
-rw-r--r--secure/lib/libcrypto/amd64/aesni-x86_64.S18
-rw-r--r--secure/lib/libcrypto/amd64/chacha-x86_64.S1026
-rw-r--r--secure/lib/libcrypto/amd64/cmll-x86_64.S8
-rw-r--r--secure/lib/libcrypto/amd64/ecp_nistz256-x86_64.S2093
-rw-r--r--secure/lib/libcrypto/amd64/ghash-x86_64.S475
-rw-r--r--secure/lib/libcrypto/amd64/keccak1600-x86_64.S2
-rw-r--r--secure/lib/libcrypto/amd64/poly1305-x86_64.S1787
-rw-r--r--secure/lib/libcrypto/amd64/rc4-x86_64.S9
-rw-r--r--secure/lib/libcrypto/amd64/rsaz-avx2.S1749
-rw-r--r--secure/lib/libcrypto/amd64/rsaz-x86_64.S863
-rw-r--r--secure/lib/libcrypto/amd64/sha1-mb-x86_64.S4315
-rw-r--r--secure/lib/libcrypto/amd64/sha1-x86_64.S2831
-rw-r--r--secure/lib/libcrypto/amd64/sha256-mb-x86_64.S4672
-rw-r--r--secure/lib/libcrypto/amd64/sha256-x86_64.S2347
-rw-r--r--secure/lib/libcrypto/amd64/sha512-x86_64.S3636
-rw-r--r--secure/lib/libcrypto/amd64/x25519-x86_64.S388
-rw-r--r--secure/lib/libcrypto/amd64/x86_64-mont.S380
-rw-r--r--secure/lib/libcrypto/amd64/x86_64-mont5.S1375
-rw-r--r--secure/lib/libcrypto/arm/aes-armv4.S2
-rw-r--r--secure/lib/libcrypto/arm/bsaes-armv7.S2
-rw-r--r--secure/lib/libcrypto/arm/ecp_nistz256-armv4.S183
-rw-r--r--secure/lib/libcrypto/arm/sha256-armv4.S2
-rw-r--r--secure/lib/libcrypto/arm/sha512-armv4.S2
-rw-r--r--secure/lib/libcrypto/i386/chacha-x86.S960
-rw-r--r--secure/lib/libcrypto/i386/ecp_nistz256-x86.S36
-rw-r--r--secure/lib/libcrypto/i386/poly1305-x86.S1110
-rw-r--r--secure/lib/libcrypto/i386/sha1-586.S2350
-rw-r--r--secure/lib/libcrypto/i386/sha256-586.S4496
-rw-r--r--secure/lib/libcrypto/man/man3/ADMISSIONS.34
-rw-r--r--secure/lib/libcrypto/man/man3/ASN1_INTEGER_get_int64.38
-rw-r--r--secure/lib/libcrypto/man/man3/ASN1_ITEM_lookup.34
-rw-r--r--secure/lib/libcrypto/man/man3/ASN1_OBJECT_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/ASN1_STRING_TABLE_add.34
-rw-r--r--secure/lib/libcrypto/man/man3/ASN1_STRING_length.34
-rw-r--r--secure/lib/libcrypto/man/man3/ASN1_STRING_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/ASN1_STRING_print_ex.34
-rw-r--r--secure/lib/libcrypto/man/man3/ASN1_TIME_set.34
-rw-r--r--secure/lib/libcrypto/man/man3/ASN1_TYPE_get.38
-rw-r--r--secure/lib/libcrypto/man/man3/ASN1_generate_nconf.34
-rw-r--r--secure/lib/libcrypto/man/man3/ASYNC_WAIT_CTX_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/ASYNC_start_job.34
-rw-r--r--secure/lib/libcrypto/man/man3/BF_encrypt.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_ADDR.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_ADDRINFO.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_connect.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_ctrl.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_f_base64.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_f_buffer.324
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_f_cipher.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_f_md.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_f_null.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_f_ssl.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_find_type.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_get_data.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_get_ex_new_index.310
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_meth_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_new_CMS.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_parse_hostserv.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_printf.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_push.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_read.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_s_accept.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_s_bio.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_s_connect.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_s_fd.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_s_file.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_s_mem.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_s_null.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_s_socket.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_set_callback.34
-rw-r--r--secure/lib/libcrypto/man/man3/BIO_should_retry.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_BLINDING_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_CTX_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_CTX_start.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_add.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_add_word.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_bn2bin.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_cmp.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_copy.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_generate_prime.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_mod_inverse.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_mod_mul_montgomery.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_mod_mul_reciprocal.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_num_bytes.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_rand.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_security_bits.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_set_bit.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_swap.34
-rw-r--r--secure/lib/libcrypto/man/man3/BN_zero.34
-rw-r--r--secure/lib/libcrypto/man/man3/BUF_MEM_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_add0_cert.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_add1_recipient_cert.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_add1_signer.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_compress.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_decrypt.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_encrypt.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_final.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_get0_RecipientInfos.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_get0_SignerInfos.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_get0_type.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_get1_ReceiptRequest.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_sign.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_sign_receipt.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_uncompress.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_verify.34
-rw-r--r--secure/lib/libcrypto/man/man3/CMS_verify_receipt.34
-rw-r--r--secure/lib/libcrypto/man/man3/CONF_modules_free.34
-rw-r--r--secure/lib/libcrypto/man/man3/CONF_modules_load_file.34
-rw-r--r--secure/lib/libcrypto/man/man3/CRYPTO_THREAD_run_once.34
-rw-r--r--secure/lib/libcrypto/man/man3/CRYPTO_get_ex_new_index.34
-rw-r--r--secure/lib/libcrypto/man/man3/CRYPTO_memcmp.34
-rw-r--r--secure/lib/libcrypto/man/man3/CTLOG_STORE_get0_log_by_id.34
-rw-r--r--secure/lib/libcrypto/man/man3/CTLOG_STORE_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/CTLOG_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/CT_POLICY_EVAL_CTX_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/DEFINE_STACK_OF.34
-rw-r--r--secure/lib/libcrypto/man/man3/DES_random_key.34
-rw-r--r--secure/lib/libcrypto/man/man3/DH_generate_key.34
-rw-r--r--secure/lib/libcrypto/man/man3/DH_generate_parameters.34
-rw-r--r--secure/lib/libcrypto/man/man3/DH_get0_pqg.34
-rw-r--r--secure/lib/libcrypto/man/man3/DH_get_1024_160.34
-rw-r--r--secure/lib/libcrypto/man/man3/DH_meth_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/DH_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/DH_new_by_nid.34
-rw-r--r--secure/lib/libcrypto/man/man3/DH_set_method.34
-rw-r--r--secure/lib/libcrypto/man/man3/DH_size.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_SIG_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_do_sign.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_dup_DH.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_generate_key.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_generate_parameters.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_get0_pqg.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_meth_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_set_method.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_sign.34
-rw-r--r--secure/lib/libcrypto/man/man3/DSA_size.34
-rw-r--r--secure/lib/libcrypto/man/man3/DTLS_get_data_mtu.34
-rw-r--r--secure/lib/libcrypto/man/man3/DTLS_set_timer_cb.34
-rw-r--r--secure/lib/libcrypto/man/man3/DTLSv1_listen.316
-rw-r--r--secure/lib/libcrypto/man/man3/ECDSA_SIG_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/ECPKParameters_print.34
-rw-r--r--secure/lib/libcrypto/man/man3/EC_GFp_simple_method.34
-rw-r--r--secure/lib/libcrypto/man/man3/EC_GROUP_copy.394
-rw-r--r--secure/lib/libcrypto/man/man3/EC_GROUP_new.396
-rw-r--r--secure/lib/libcrypto/man/man3/EC_KEY_get_enc_flags.34
-rw-r--r--secure/lib/libcrypto/man/man3/EC_KEY_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/EC_POINT_add.34
-rw-r--r--secure/lib/libcrypto/man/man3/EC_POINT_new.324
-rw-r--r--secure/lib/libcrypto/man/man3/ENGINE_add.34
-rw-r--r--secure/lib/libcrypto/man/man3/ERR_GET_LIB.34
-rw-r--r--secure/lib/libcrypto/man/man3/ERR_clear_error.34
-rw-r--r--secure/lib/libcrypto/man/man3/ERR_error_string.34
-rw-r--r--secure/lib/libcrypto/man/man3/ERR_get_error.34
-rw-r--r--secure/lib/libcrypto/man/man3/ERR_load_crypto_strings.34
-rw-r--r--secure/lib/libcrypto/man/man3/ERR_load_strings.34
-rw-r--r--secure/lib/libcrypto/man/man3/ERR_print_errors.34
-rw-r--r--secure/lib/libcrypto/man/man3/ERR_put_error.34
-rw-r--r--secure/lib/libcrypto/man/man3/ERR_remove_state.34
-rw-r--r--secure/lib/libcrypto/man/man3/ERR_set_mark.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_BytesToKey.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_CIPHER_CTX_get_cipher_data.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_CIPHER_meth_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_DigestInit.373
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_DigestSignInit.314
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_DigestVerifyInit.36
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_EncodeInit.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_EncryptInit.313
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_MD_meth_new.323
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_OpenInit.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_ASN1_METHOD.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_ctrl.320
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set1_pbe_pass.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_hkdf_md.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_rsa_pss_keygen_md.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_scrypt_N.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_tls1_prf_md.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_asn1_get_count.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_cmp.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_decrypt.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_derive.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_encrypt.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_get_default_digest_nid.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_keygen.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_meth_get_count.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_meth_new.330
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_new.335
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_print_private.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_set1_RSA.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_sign.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_size.3210
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_verify.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_PKEY_verify_recover.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_SealInit.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_SignInit.344
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_VerifyInit.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_aes.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_aria.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_bf_cbc.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_blake2b512.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_camellia.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_cast5_cbc.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_chacha20.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_des.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_desx_cbc.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_idea_cbc.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_md2.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_md4.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_md5.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_mdc2.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_rc2_cbc.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_rc4.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_rc5_32_12_16_cbc.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_ripemd160.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_seed_cbc.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_sha1.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_sha224.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_sha3_224.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_sm3.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_sm4_cbc.34
-rw-r--r--secure/lib/libcrypto/man/man3/EVP_whirlpool.34
-rw-r--r--secure/lib/libcrypto/man/man3/HMAC.34
-rw-r--r--secure/lib/libcrypto/man/man3/MD5.34
-rw-r--r--secure/lib/libcrypto/man/man3/MDC2_Init.34
-rw-r--r--secure/lib/libcrypto/man/man3/Makefile46
-rw-r--r--secure/lib/libcrypto/man/man3/OBJ_nid2obj.34
-rw-r--r--secure/lib/libcrypto/man/man3/OCSP_REQUEST_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/OCSP_cert_to_id.34
-rw-r--r--secure/lib/libcrypto/man/man3/OCSP_request_add1_nonce.38
-rw-r--r--secure/lib/libcrypto/man/man3/OCSP_resp_find_status.34
-rw-r--r--secure/lib/libcrypto/man/man3/OCSP_response_status.38
-rw-r--r--secure/lib/libcrypto/man/man3/OCSP_sendreq_new.313
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_Applink.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_LH_COMPFUNC.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_LH_stats.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_VERSION_NUMBER.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_config.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_fork_prepare.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_ia32cap.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_init_crypto.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_init_ssl.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_instrument_bus.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_load_builtin_modules.34
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_malloc.38
-rw-r--r--secure/lib/libcrypto/man/man3/OPENSSL_secure_malloc.39
-rw-r--r--secure/lib/libcrypto/man/man3/OSSL_STORE_INFO.34
-rw-r--r--secure/lib/libcrypto/man/man3/OSSL_STORE_LOADER.34
-rw-r--r--secure/lib/libcrypto/man/man3/OSSL_STORE_SEARCH.34
-rw-r--r--secure/lib/libcrypto/man/man3/OSSL_STORE_expect.34
-rw-r--r--secure/lib/libcrypto/man/man3/OSSL_STORE_open.34
-rw-r--r--secure/lib/libcrypto/man/man3/OpenSSL_add_all_algorithms.34
-rw-r--r--secure/lib/libcrypto/man/man3/PEM_bytes_read_bio.38
-rw-r--r--secure/lib/libcrypto/man/man3/PEM_read.34
-rw-r--r--secure/lib/libcrypto/man/man3/PEM_read_CMS.34
-rw-r--r--secure/lib/libcrypto/man/man3/PEM_read_bio_PrivateKey.315
-rw-r--r--secure/lib/libcrypto/man/man3/PEM_read_bio_ex.36
-rw-r--r--secure/lib/libcrypto/man/man3/PEM_write_bio_CMS_stream.34
-rw-r--r--secure/lib/libcrypto/man/man3/PEM_write_bio_PKCS7_stream.34
-rw-r--r--secure/lib/libcrypto/man/man3/PKCS12_create.34
-rw-r--r--secure/lib/libcrypto/man/man3/PKCS12_newpass.34
-rw-r--r--secure/lib/libcrypto/man/man3/PKCS12_parse.34
-rw-r--r--secure/lib/libcrypto/man/man3/PKCS5_PBKDF2_HMAC.34
-rw-r--r--secure/lib/libcrypto/man/man3/PKCS7_decrypt.34
-rw-r--r--secure/lib/libcrypto/man/man3/PKCS7_encrypt.34
-rw-r--r--secure/lib/libcrypto/man/man3/PKCS7_sign.34
-rw-r--r--secure/lib/libcrypto/man/man3/PKCS7_sign_add_signer.34
-rw-r--r--secure/lib/libcrypto/man/man3/PKCS7_verify.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_DRBG_generate.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_DRBG_get0_master.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_DRBG_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_DRBG_reseed.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_DRBG_set_callbacks.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_DRBG_set_ex_data.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_add.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_bytes.330
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_cleanup.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_egd.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_load_file.34
-rw-r--r--secure/lib/libcrypto/man/man3/RAND_set_rand_method.36
-rw-r--r--secure/lib/libcrypto/man/man3/RC4_set_key.34
-rw-r--r--secure/lib/libcrypto/man/man3/RIPEMD160_Init.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_blinding_on.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_check_key.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_generate_key.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_get0_key.311
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_meth_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_padding_add_PKCS1_type_1.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_print.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_private_encrypt.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_public_encrypt.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_set_method.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_sign.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_sign_ASN1_OCTET_STRING.34
-rw-r--r--secure/lib/libcrypto/man/man3/RSA_size.34
-rw-r--r--secure/lib/libcrypto/man/man3/SCT_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/SCT_print.34
-rw-r--r--secure/lib/libcrypto/man/man3/SCT_validate.34
-rw-r--r--secure/lib/libcrypto/man/man3/SHA256_Init.34
-rw-r--r--secure/lib/libcrypto/man/man3/SMIME_read_CMS.34
-rw-r--r--secure/lib/libcrypto/man/man3/SMIME_read_PKCS7.34
-rw-r--r--secure/lib/libcrypto/man/man3/SMIME_write_CMS.34
-rw-r--r--secure/lib/libcrypto/man/man3/SMIME_write_PKCS7.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CIPHER_get_name.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_COMP_add_compression_method.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CONF_CTX_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set1_prefix.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set_flags.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set_ssl_ctx.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CONF_cmd.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CONF_cmd_argv.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_add1_chain_cert.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_add_extra_chain_cert.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_add_session.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_config.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_ctrl.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_dane_enable.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_flush_sessions.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_free.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_get0_param.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_get_verify_mode.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_has_client_custom_ext.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_load_verify_locations.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_sess_number.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_sess_set_cache_size.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_sess_set_get_cb.361
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_sessions.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set0_CA_list.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set1_curves.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set1_sigalgs.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set1_verify_cert_store.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_alpn_select_cb.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_cb.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_store.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_verify_callback.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_cipher_list.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_client_cert_cb.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_client_hello_cb.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_ct_validation_callback.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_ctlog_list_file.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_default_passwd_cb.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_ex_data.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_generate_session_id.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_info_callback.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_keylog_callback.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_max_cert_list.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_min_proto_version.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_mode.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_msg_callback.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_num_tickets.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_options.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_psk_client_callback.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_quiet_shutdown.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_read_ahead.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_record_padding_callback.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_security_level.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_session_cache_mode.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_session_id_context.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_session_ticket_cb.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_split_send_fragment.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_ssl_version.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_stateless_cookie_generate_cb.368
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_timeout.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_servername_callback.3103
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_status_cb.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_ticket_key_cb.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_use_srtp.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_tmp_dh_callback.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_set_verify.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_use_certificate.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_use_psk_identity_hint.310
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_CTX_use_serverinfo.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_free.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_get0_cipher.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_get0_hostname.311
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_get0_id_context.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_get0_peer.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_get_compress_id.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_get_ex_data.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_get_protocol_version.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_get_time.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_has_ticket.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_is_resumable.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_print.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_SESSION_set1_id.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_accept.38
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_alert_type_string.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_alloc_buffers.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_check_chain.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_clear.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_connect.38
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_do_handshake.38
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_export_keying_material.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_extension_supported.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_free.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get0_peer_scts.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_SSL_CTX.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_all_async_fds.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_ciphers.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_client_random.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_current_cipher.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_default_timeout.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_error.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_extms_support.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_fd.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_peer_cert_chain.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_peer_certificate.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_peer_signature_nid.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_peer_tmp_key.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_psk_identity.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_rbio.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_session.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_shared_sigalgs.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_verify_result.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_get_version.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_in_init.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_key_update.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_library_init.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_load_client_CA_file.320
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_pending.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_read.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_read_early_data.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_rstate_string.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_session_reused.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_set1_host.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_set_bio.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_set_connect_state.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_set_fd.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_set_session.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_set_shutdown.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_set_verify_result.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_shutdown.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_state_string.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_want.34
-rw-r--r--secure/lib/libcrypto/man/man3/SSL_write.34
-rw-r--r--secure/lib/libcrypto/man/man3/UI_STRING.34
-rw-r--r--secure/lib/libcrypto/man/man3/UI_UTIL_read_pw.34
-rw-r--r--secure/lib/libcrypto/man/man3/UI_create_method.34
-rw-r--r--secure/lib/libcrypto/man/man3/UI_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509V3_get_d2i.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_ALGOR_dup.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_CRL_get0_by_serial.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_EXTENSION_set_object.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_LOOKUP.3310
-rw-r--r--secure/lib/libcrypto/man/man3/X509_LOOKUP_hash_dir.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_LOOKUP_meth_new.325
-rw-r--r--secure/lib/libcrypto/man/man3/X509_NAME_ENTRY_get_object.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_NAME_add_entry_by_txt.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_NAME_get0_der.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_NAME_get_index_by_NID.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_NAME_print_ex.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_PUBKEY_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_SIG_get0.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_STORE_CTX_get_error.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_STORE_CTX_new.36
-rw-r--r--secure/lib/libcrypto/man/man3/X509_STORE_CTX_set_verify_cb.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_STORE_add_cert.321
-rw-r--r--secure/lib/libcrypto/man/man3/X509_STORE_get0_param.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_STORE_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_STORE_set_verify_cb_func.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_VERIFY_PARAM_set_flags.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_check_ca.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_check_host.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_check_issued.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_check_private_key.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_cmp.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_cmp_time.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_digest.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_dup.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_get0_notBefore.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_get0_signature.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_get0_uids.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_get_extension_flags.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_get_pubkey.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_get_serialNumber.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_get_subject_name.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_get_version.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_new.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_sign.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509_verify_cert.34
-rw-r--r--secure/lib/libcrypto/man/man3/X509v3_get_ext_by_NID.34
-rw-r--r--secure/lib/libcrypto/man/man3/d2i_DHparams.34
-rw-r--r--secure/lib/libcrypto/man/man3/d2i_PKCS8PrivateKey_bio.34
-rw-r--r--secure/lib/libcrypto/man/man3/d2i_PrivateKey.34
-rw-r--r--secure/lib/libcrypto/man/man3/d2i_SSL_SESSION.34
-rw-r--r--secure/lib/libcrypto/man/man3/d2i_X509.312
-rw-r--r--secure/lib/libcrypto/man/man3/i2d_CMS_bio_stream.34
-rw-r--r--secure/lib/libcrypto/man/man3/i2d_PKCS7_bio_stream.34
-rw-r--r--secure/lib/libcrypto/man/man3/i2d_re_X509_tbs.34
-rw-r--r--secure/lib/libcrypto/man/man3/o2i_SCT_LIST.34
-rw-r--r--secure/lib/libcrypto/man/man5/x509v3_config.52
-rw-r--r--secure/lib/libcrypto/man/man7/Ed25519.76
-rw-r--r--secure/lib/libcrypto/man/man7/Makefile1
-rw-r--r--secure/lib/libcrypto/man/man7/RAND.72
-rw-r--r--secure/lib/libcrypto/man/man7/RAND_DRBG.72
-rw-r--r--secure/lib/libcrypto/man/man7/RSA-PSS.72
-rw-r--r--secure/lib/libcrypto/man/man7/SM2.72
-rw-r--r--secure/lib/libcrypto/man/man7/X25519.76
-rw-r--r--secure/lib/libcrypto/man/man7/bio.72
-rw-r--r--secure/lib/libcrypto/man/man7/ct.72
-rw-r--r--secure/lib/libcrypto/man/man7/des_modes.72
-rw-r--r--secure/lib/libcrypto/man/man7/evp.72
-rw-r--r--secure/lib/libcrypto/man/man7/ossl_store-file.72
-rw-r--r--secure/lib/libcrypto/man/man7/ossl_store.72
-rw-r--r--secure/lib/libcrypto/man/man7/passphrase-encoding.76
-rw-r--r--secure/lib/libcrypto/man/man7/proxy-certificates.7478
-rw-r--r--secure/lib/libcrypto/man/man7/scrypt.72
-rw-r--r--secure/lib/libcrypto/man/man7/ssl.72
-rw-r--r--secure/lib/libcrypto/man/man7/x509.72
522 files changed, 3006 insertions, 45451 deletions
diff --git a/secure/lib/libcrypto/Makefile.inc b/secure/lib/libcrypto/Makefile.inc
index a9d8df50be024..91217dc1e4546 100644
--- a/secure/lib/libcrypto/Makefile.inc
+++ b/secure/lib/libcrypto/Makefile.inc
@@ -3,8 +3,8 @@
.include <bsd.own.mk>
# OpenSSL version used for manual page generation
-OPENSSL_VER= 1.1.1d
-OPENSSL_DATE= 2019-09-10
+OPENSSL_VER= 1.1.1e
+OPENSSL_DATE= 2020-03-17
LCRYPTO_SRC= ${SRCTOP}/crypto/openssl
LCRYPTO_DOC= ${LCRYPTO_SRC}/doc
diff --git a/secure/lib/libcrypto/aarch64/ecp_nistz256-armv8.S b/secure/lib/libcrypto/aarch64/ecp_nistz256-armv8.S
index c0b5f8cede176..f7fcce4365fa0 100644
--- a/secure/lib/libcrypto/aarch64/ecp_nistz256-armv8.S
+++ b/secure/lib/libcrypto/aarch64/ecp_nistz256-armv8.S
@@ -3017,7 +3017,7 @@ __ecp_nistz256_div_by_2:
.align 5
ecp_nistz256_point_double:
.inst 0xd503233f // paciasp
- stp x29,x30,[sp,#-80]!
+ stp x29,x30,[sp,#-96]!
add x29,sp,#0
stp x19,x20,[sp,#16]
stp x21,x22,[sp,#32]
@@ -3150,7 +3150,7 @@ ecp_nistz256_point_double:
add sp,x29,#0 // destroy frame
ldp x19,x20,[x29,#16]
ldp x21,x22,[x29,#32]
- ldp x29,x30,[sp],#80
+ ldp x29,x30,[sp],#96
.inst 0xd50323bf // autiasp
ret
.size ecp_nistz256_point_double,.-ecp_nistz256_point_double
@@ -3159,12 +3159,13 @@ ecp_nistz256_point_double:
.align 5
ecp_nistz256_point_add:
.inst 0xd503233f // paciasp
- stp x29,x30,[sp,#-80]!
+ stp x29,x30,[sp,#-96]!
add x29,sp,#0
stp x19,x20,[sp,#16]
stp x21,x22,[sp,#32]
stp x23,x24,[sp,#48]
stp x25,x26,[sp,#64]
+ stp x27,x28,[sp,#80]
sub sp,sp,#32*12
ldp x4,x5,[x2,#64] // in2_z
@@ -3178,7 +3179,7 @@ ecp_nistz256_point_add:
orr x10,x6,x7
orr x25,x8,x10
cmp x25,#0
- csetm x25,ne // !in2infty
+ csetm x25,ne // ~in2infty
add x0,sp,#192
bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Z2sqr, in2_z);
@@ -3188,7 +3189,7 @@ ecp_nistz256_point_add:
orr x10,x6,x7
orr x24,x8,x10
cmp x24,#0
- csetm x24,ne // !in1infty
+ csetm x24,ne // ~in1infty
add x0,sp,#128
bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Z1sqr, in1_z);
@@ -3229,7 +3230,7 @@ ecp_nistz256_point_add:
orr x14,x14,x15 // see if result is zero
orr x16,x16,x17
- orr x26,x14,x16
+ orr x26,x14,x16 // ~is_equal(S1,S2)
add x2,sp,#192
add x0,sp,#256
@@ -3250,32 +3251,21 @@ ecp_nistz256_point_add:
orr x14,x14,x15 // see if result is zero
orr x16,x16,x17
- orr x14,x14,x16
- tst x14,x14
- b.ne .Ladd_proceed // is_equal(U1,U2)?
+ orr x14,x14,x16 // ~is_equal(U1,U2)
- tst x24,x25
- b.eq .Ladd_proceed // (in1infty || in2infty)?
+ mvn x27,x24 // -1/0 -> 0/-1
+ mvn x28,x25 // -1/0 -> 0/-1
+ orr x14,x14,x27
+ orr x14,x14,x28
+ orr x14,x14,x26
+ cbnz x14,.Ladd_proceed // if(~is_equal(U1,U2) | in1infty | in2infty | ~is_equal(S1,S2))
- tst x26,x26
- b.eq .Ladd_double // is_equal(S1,S2)?
-
- eor x4,x4,x4
- eor x5,x5,x5
- stp x4,x5,[x21]
- stp x4,x5,[x21,#16]
- stp x4,x5,[x21,#32]
- stp x4,x5,[x21,#48]
- stp x4,x5,[x21,#64]
- stp x4,x5,[x21,#80]
- b .Ladd_done
-
-.align 4
.Ladd_double:
mov x1,x22
mov x0,x21
ldp x23,x24,[x29,#48]
ldp x25,x26,[x29,#64]
+ ldp x27,x28,[x29,#80]
add sp,sp,#32*(12-4) // difference in stack frames
b .Ldouble_shortcut
@@ -3357,14 +3347,14 @@ ecp_nistz256_point_add:
ldp x8,x9,[x23] // in2
ldp x10,x11,[x23,#16]
ldp x14,x15,[x22,#0] // in1
- cmp x24,#0 // !, remember?
+ cmp x24,#0 // ~, remember?
ldp x16,x17,[x22,#0+16]
csel x8,x4,x8,ne
csel x9,x5,x9,ne
ldp x4,x5,[sp,#0+0+32] // res
csel x10,x6,x10,ne
csel x11,x7,x11,ne
- cmp x25,#0 // !, remember?
+ cmp x25,#0 // ~, remember?
ldp x6,x7,[sp,#0+0+48]
csel x14,x8,x14,ne
csel x15,x9,x15,ne
@@ -3375,14 +3365,14 @@ ecp_nistz256_point_add:
stp x14,x15,[x21,#0]
stp x16,x17,[x21,#0+16]
ldp x14,x15,[x22,#32] // in1
- cmp x24,#0 // !, remember?
+ cmp x24,#0 // ~, remember?
ldp x16,x17,[x22,#32+16]
csel x8,x4,x8,ne
csel x9,x5,x9,ne
ldp x4,x5,[sp,#0+32+32] // res
csel x10,x6,x10,ne
csel x11,x7,x11,ne
- cmp x25,#0 // !, remember?
+ cmp x25,#0 // ~, remember?
ldp x6,x7,[sp,#0+32+48]
csel x14,x8,x14,ne
csel x15,x9,x15,ne
@@ -3393,13 +3383,13 @@ ecp_nistz256_point_add:
stp x14,x15,[x21,#32]
stp x16,x17,[x21,#32+16]
ldp x14,x15,[x22,#64] // in1
- cmp x24,#0 // !, remember?
+ cmp x24,#0 // ~, remember?
ldp x16,x17,[x22,#64+16]
csel x8,x4,x8,ne
csel x9,x5,x9,ne
csel x10,x6,x10,ne
csel x11,x7,x11,ne
- cmp x25,#0 // !, remember?
+ cmp x25,#0 // ~, remember?
csel x14,x8,x14,ne
csel x15,x9,x15,ne
csel x16,x10,x16,ne
@@ -3413,7 +3403,8 @@ ecp_nistz256_point_add:
ldp x21,x22,[x29,#32]
ldp x23,x24,[x29,#48]
ldp x25,x26,[x29,#64]
- ldp x29,x30,[sp],#80
+ ldp x27,x28,[x29,#80]
+ ldp x29,x30,[sp],#96
.inst 0xd50323bf // autiasp
ret
.size ecp_nistz256_point_add,.-ecp_nistz256_point_add
@@ -3442,7 +3433,7 @@ ecp_nistz256_point_add_affine:
orr x10,x6,x7
orr x24,x8,x10
cmp x24,#0
- csetm x24,ne // !in1infty
+ csetm x24,ne // ~in1infty
ldp x14,x15,[x2] // in2_x
ldp x16,x17,[x2,#16]
@@ -3456,7 +3447,7 @@ ecp_nistz256_point_add_affine:
orr x8,x8,x10
orr x25,x14,x8
cmp x25,#0
- csetm x25,ne // !in2infty
+ csetm x25,ne // ~in2infty
add x0,sp,#128
bl __ecp_nistz256_sqr_mont // p256_sqr_mont(Z1sqr, in1_z);
@@ -3563,14 +3554,14 @@ ecp_nistz256_point_add_affine:
ldp x8,x9,[x23] // in2
ldp x10,x11,[x23,#16]
ldp x14,x15,[x22,#0] // in1
- cmp x24,#0 // !, remember?
+ cmp x24,#0 // ~, remember?
ldp x16,x17,[x22,#0+16]
csel x8,x4,x8,ne
csel x9,x5,x9,ne
ldp x4,x5,[sp,#0+0+32] // res
csel x10,x6,x10,ne
csel x11,x7,x11,ne
- cmp x25,#0 // !, remember?
+ cmp x25,#0 // ~, remember?
ldp x6,x7,[sp,#0+0+48]
csel x14,x8,x14,ne
csel x15,x9,x15,ne
@@ -3582,14 +3573,14 @@ ecp_nistz256_point_add_affine:
stp x16,x17,[x21,#0+16]
adr x23,.Lone_mont-64
ldp x14,x15,[x22,#32] // in1
- cmp x24,#0 // !, remember?
+ cmp x24,#0 // ~, remember?
ldp x16,x17,[x22,#32+16]
csel x8,x4,x8,ne
csel x9,x5,x9,ne
ldp x4,x5,[sp,#0+32+32] // res
csel x10,x6,x10,ne
csel x11,x7,x11,ne
- cmp x25,#0 // !, remember?
+ cmp x25,#0 // ~, remember?
ldp x6,x7,[sp,#0+32+48]
csel x14,x8,x14,ne
csel x15,x9,x15,ne
@@ -3600,13 +3591,13 @@ ecp_nistz256_point_add_affine:
stp x14,x15,[x21,#32]
stp x16,x17,[x21,#32+16]
ldp x14,x15,[x22,#64] // in1
- cmp x24,#0 // !, remember?
+ cmp x24,#0 // ~, remember?
ldp x16,x17,[x22,#64+16]
csel x8,x4,x8,ne
csel x9,x5,x9,ne
csel x10,x6,x10,ne
csel x11,x7,x11,ne
- cmp x25,#0 // !, remember?
+ cmp x25,#0 // ~, remember?
csel x14,x8,x14,ne
csel x15,x9,x15,ne
csel x16,x10,x16,ne
diff --git a/secure/lib/libcrypto/aarch64/sha256-armv8.S b/secure/lib/libcrypto/aarch64/sha256-armv8.S
index 40d1fb269b353..35bf48ba51784 100644
--- a/secure/lib/libcrypto/aarch64/sha256-armv8.S
+++ b/secure/lib/libcrypto/aarch64/sha256-armv8.S
@@ -1,6 +1,6 @@
/* $FreeBSD$ */
/* Do not modify. This file is auto-generated from sha512-armv8.pl. */
-// Copyright 2014-2019 The OpenSSL Project Authors. All Rights Reserved.
+// Copyright 2014-2020 The OpenSSL Project Authors. All Rights Reserved.
//
// Licensed under the OpenSSL license (the "License"). You may not use
// this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/aarch64/sha512-armv8.S b/secure/lib/libcrypto/aarch64/sha512-armv8.S
index a2a2b030ef4c9..06cf5a239d897 100644
--- a/secure/lib/libcrypto/aarch64/sha512-armv8.S
+++ b/secure/lib/libcrypto/aarch64/sha512-armv8.S
@@ -1,6 +1,6 @@
/* $FreeBSD$ */
/* Do not modify. This file is auto-generated from sha512-armv8.pl. */
-// Copyright 2014-2019 The OpenSSL Project Authors. All Rights Reserved.
+// Copyright 2014-2020 The OpenSSL Project Authors. All Rights Reserved.
//
// Licensed under the OpenSSL license (the "License"). You may not use
// this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/amd64/aesni-gcm-x86_64.S b/secure/lib/libcrypto/amd64/aesni-gcm-x86_64.S
index 723abb458f981..1cdcc86043b23 100644
--- a/secure/lib/libcrypto/amd64/aesni-gcm-x86_64.S
+++ b/secure/lib/libcrypto/amd64/aesni-gcm-x86_64.S
@@ -2,786 +2,20 @@
/* Do not modify. This file is auto-generated from aesni-gcm-x86_64.pl. */
.text
-.type _aesni_ctr32_ghash_6x,@function
-.align 32
-_aesni_ctr32_ghash_6x:
- vmovdqu 32(%r11),%xmm2
- subq $6,%rdx
- vpxor %xmm4,%xmm4,%xmm4
- vmovdqu 0-128(%rcx),%xmm15
- vpaddb %xmm2,%xmm1,%xmm10
- vpaddb %xmm2,%xmm10,%xmm11
- vpaddb %xmm2,%xmm11,%xmm12
- vpaddb %xmm2,%xmm12,%xmm13
- vpaddb %xmm2,%xmm13,%xmm14
- vpxor %xmm15,%xmm1,%xmm9
- vmovdqu %xmm4,16+8(%rsp)
- jmp .Loop6x
-
-.align 32
-.Loop6x:
- addl $100663296,%ebx
- jc .Lhandle_ctr32
- vmovdqu 0-32(%r9),%xmm3
- vpaddb %xmm2,%xmm14,%xmm1
- vpxor %xmm15,%xmm10,%xmm10
- vpxor %xmm15,%xmm11,%xmm11
-
-.Lresume_ctr32:
- vmovdqu %xmm1,(%r8)
- vpclmulqdq $0x10,%xmm3,%xmm7,%xmm5
- vpxor %xmm15,%xmm12,%xmm12
- vmovups 16-128(%rcx),%xmm2
- vpclmulqdq $0x01,%xmm3,%xmm7,%xmm6
- xorq %r12,%r12
- cmpq %r14,%r15
-
- vaesenc %xmm2,%xmm9,%xmm9
- vmovdqu 48+8(%rsp),%xmm0
- vpxor %xmm15,%xmm13,%xmm13
- vpclmulqdq $0x00,%xmm3,%xmm7,%xmm1
- vaesenc %xmm2,%xmm10,%xmm10
- vpxor %xmm15,%xmm14,%xmm14
- setnc %r12b
- vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7
- vaesenc %xmm2,%xmm11,%xmm11
- vmovdqu 16-32(%r9),%xmm3
- negq %r12
- vaesenc %xmm2,%xmm12,%xmm12
- vpxor %xmm5,%xmm6,%xmm6
- vpclmulqdq $0x00,%xmm3,%xmm0,%xmm5
- vpxor %xmm4,%xmm8,%xmm8
- vaesenc %xmm2,%xmm13,%xmm13
- vpxor %xmm5,%xmm1,%xmm4
- andq $0x60,%r12
- vmovups 32-128(%rcx),%xmm15
- vpclmulqdq $0x10,%xmm3,%xmm0,%xmm1
- vaesenc %xmm2,%xmm14,%xmm14
-
- vpclmulqdq $0x01,%xmm3,%xmm0,%xmm2
- leaq (%r14,%r12,1),%r14
- vaesenc %xmm15,%xmm9,%xmm9
- vpxor 16+8(%rsp),%xmm8,%xmm8
- vpclmulqdq $0x11,%xmm3,%xmm0,%xmm3
- vmovdqu 64+8(%rsp),%xmm0
- vaesenc %xmm15,%xmm10,%xmm10
- movbeq 88(%r14),%r13
- vaesenc %xmm15,%xmm11,%xmm11
- movbeq 80(%r14),%r12
- vaesenc %xmm15,%xmm12,%xmm12
- movq %r13,32+8(%rsp)
- vaesenc %xmm15,%xmm13,%xmm13
- movq %r12,40+8(%rsp)
- vmovdqu 48-32(%r9),%xmm5
- vaesenc %xmm15,%xmm14,%xmm14
-
- vmovups 48-128(%rcx),%xmm15
- vpxor %xmm1,%xmm6,%xmm6
- vpclmulqdq $0x00,%xmm5,%xmm0,%xmm1
- vaesenc %xmm15,%xmm9,%xmm9
- vpxor %xmm2,%xmm6,%xmm6
- vpclmulqdq $0x10,%xmm5,%xmm0,%xmm2
- vaesenc %xmm15,%xmm10,%xmm10
- vpxor %xmm3,%xmm7,%xmm7
- vpclmulqdq $0x01,%xmm5,%xmm0,%xmm3
- vaesenc %xmm15,%xmm11,%xmm11
- vpclmulqdq $0x11,%xmm5,%xmm0,%xmm5
- vmovdqu 80+8(%rsp),%xmm0
- vaesenc %xmm15,%xmm12,%xmm12
- vaesenc %xmm15,%xmm13,%xmm13
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqu 64-32(%r9),%xmm1
- vaesenc %xmm15,%xmm14,%xmm14
-
- vmovups 64-128(%rcx),%xmm15
- vpxor %xmm2,%xmm6,%xmm6
- vpclmulqdq $0x00,%xmm1,%xmm0,%xmm2
- vaesenc %xmm15,%xmm9,%xmm9
- vpxor %xmm3,%xmm6,%xmm6
- vpclmulqdq $0x10,%xmm1,%xmm0,%xmm3
- vaesenc %xmm15,%xmm10,%xmm10
- movbeq 72(%r14),%r13
- vpxor %xmm5,%xmm7,%xmm7
- vpclmulqdq $0x01,%xmm1,%xmm0,%xmm5
- vaesenc %xmm15,%xmm11,%xmm11
- movbeq 64(%r14),%r12
- vpclmulqdq $0x11,%xmm1,%xmm0,%xmm1
- vmovdqu 96+8(%rsp),%xmm0
- vaesenc %xmm15,%xmm12,%xmm12
- movq %r13,48+8(%rsp)
- vaesenc %xmm15,%xmm13,%xmm13
- movq %r12,56+8(%rsp)
- vpxor %xmm2,%xmm4,%xmm4
- vmovdqu 96-32(%r9),%xmm2
- vaesenc %xmm15,%xmm14,%xmm14
-
- vmovups 80-128(%rcx),%xmm15
- vpxor %xmm3,%xmm6,%xmm6
- vpclmulqdq $0x00,%xmm2,%xmm0,%xmm3
- vaesenc %xmm15,%xmm9,%xmm9
- vpxor %xmm5,%xmm6,%xmm6
- vpclmulqdq $0x10,%xmm2,%xmm0,%xmm5
- vaesenc %xmm15,%xmm10,%xmm10
- movbeq 56(%r14),%r13
- vpxor %xmm1,%xmm7,%xmm7
- vpclmulqdq $0x01,%xmm2,%xmm0,%xmm1
- vpxor 112+8(%rsp),%xmm8,%xmm8
- vaesenc %xmm15,%xmm11,%xmm11
- movbeq 48(%r14),%r12
- vpclmulqdq $0x11,%xmm2,%xmm0,%xmm2
- vaesenc %xmm15,%xmm12,%xmm12
- movq %r13,64+8(%rsp)
- vaesenc %xmm15,%xmm13,%xmm13
- movq %r12,72+8(%rsp)
- vpxor %xmm3,%xmm4,%xmm4
- vmovdqu 112-32(%r9),%xmm3
- vaesenc %xmm15,%xmm14,%xmm14
-
- vmovups 96-128(%rcx),%xmm15
- vpxor %xmm5,%xmm6,%xmm6
- vpclmulqdq $0x10,%xmm3,%xmm8,%xmm5
- vaesenc %xmm15,%xmm9,%xmm9
- vpxor %xmm1,%xmm6,%xmm6
- vpclmulqdq $0x01,%xmm3,%xmm8,%xmm1
- vaesenc %xmm15,%xmm10,%xmm10
- movbeq 40(%r14),%r13
- vpxor %xmm2,%xmm7,%xmm7
- vpclmulqdq $0x00,%xmm3,%xmm8,%xmm2
- vaesenc %xmm15,%xmm11,%xmm11
- movbeq 32(%r14),%r12
- vpclmulqdq $0x11,%xmm3,%xmm8,%xmm8
- vaesenc %xmm15,%xmm12,%xmm12
- movq %r13,80+8(%rsp)
- vaesenc %xmm15,%xmm13,%xmm13
- movq %r12,88+8(%rsp)
- vpxor %xmm5,%xmm6,%xmm6
- vaesenc %xmm15,%xmm14,%xmm14
- vpxor %xmm1,%xmm6,%xmm6
-
- vmovups 112-128(%rcx),%xmm15
- vpslldq $8,%xmm6,%xmm5
- vpxor %xmm2,%xmm4,%xmm4
- vmovdqu 16(%r11),%xmm3
-
- vaesenc %xmm15,%xmm9,%xmm9
- vpxor %xmm8,%xmm7,%xmm7
- vaesenc %xmm15,%xmm10,%xmm10
- vpxor %xmm5,%xmm4,%xmm4
- movbeq 24(%r14),%r13
- vaesenc %xmm15,%xmm11,%xmm11
- movbeq 16(%r14),%r12
- vpalignr $8,%xmm4,%xmm4,%xmm0
- vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4
- movq %r13,96+8(%rsp)
- vaesenc %xmm15,%xmm12,%xmm12
- movq %r12,104+8(%rsp)
- vaesenc %xmm15,%xmm13,%xmm13
- vmovups 128-128(%rcx),%xmm1
- vaesenc %xmm15,%xmm14,%xmm14
-
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups 144-128(%rcx),%xmm15
- vaesenc %xmm1,%xmm10,%xmm10
- vpsrldq $8,%xmm6,%xmm6
- vaesenc %xmm1,%xmm11,%xmm11
- vpxor %xmm6,%xmm7,%xmm7
- vaesenc %xmm1,%xmm12,%xmm12
- vpxor %xmm0,%xmm4,%xmm4
- movbeq 8(%r14),%r13
- vaesenc %xmm1,%xmm13,%xmm13
- movbeq 0(%r14),%r12
- vaesenc %xmm1,%xmm14,%xmm14
- vmovups 160-128(%rcx),%xmm1
- cmpl $11,%ebp
- jb .Lenc_tail
-
- vaesenc %xmm15,%xmm9,%xmm9
- vaesenc %xmm15,%xmm10,%xmm10
- vaesenc %xmm15,%xmm11,%xmm11
- vaesenc %xmm15,%xmm12,%xmm12
- vaesenc %xmm15,%xmm13,%xmm13
- vaesenc %xmm15,%xmm14,%xmm14
-
- vaesenc %xmm1,%xmm9,%xmm9
- vaesenc %xmm1,%xmm10,%xmm10
- vaesenc %xmm1,%xmm11,%xmm11
- vaesenc %xmm1,%xmm12,%xmm12
- vaesenc %xmm1,%xmm13,%xmm13
- vmovups 176-128(%rcx),%xmm15
- vaesenc %xmm1,%xmm14,%xmm14
- vmovups 192-128(%rcx),%xmm1
- je .Lenc_tail
-
- vaesenc %xmm15,%xmm9,%xmm9
- vaesenc %xmm15,%xmm10,%xmm10
- vaesenc %xmm15,%xmm11,%xmm11
- vaesenc %xmm15,%xmm12,%xmm12
- vaesenc %xmm15,%xmm13,%xmm13
- vaesenc %xmm15,%xmm14,%xmm14
-
- vaesenc %xmm1,%xmm9,%xmm9
- vaesenc %xmm1,%xmm10,%xmm10
- vaesenc %xmm1,%xmm11,%xmm11
- vaesenc %xmm1,%xmm12,%xmm12
- vaesenc %xmm1,%xmm13,%xmm13
- vmovups 208-128(%rcx),%xmm15
- vaesenc %xmm1,%xmm14,%xmm14
- vmovups 224-128(%rcx),%xmm1
- jmp .Lenc_tail
-
-.align 32
-.Lhandle_ctr32:
- vmovdqu (%r11),%xmm0
- vpshufb %xmm0,%xmm1,%xmm6
- vmovdqu 48(%r11),%xmm5
- vpaddd 64(%r11),%xmm6,%xmm10
- vpaddd %xmm5,%xmm6,%xmm11
- vmovdqu 0-32(%r9),%xmm3
- vpaddd %xmm5,%xmm10,%xmm12
- vpshufb %xmm0,%xmm10,%xmm10
- vpaddd %xmm5,%xmm11,%xmm13
- vpshufb %xmm0,%xmm11,%xmm11
- vpxor %xmm15,%xmm10,%xmm10
- vpaddd %xmm5,%xmm12,%xmm14
- vpshufb %xmm0,%xmm12,%xmm12
- vpxor %xmm15,%xmm11,%xmm11
- vpaddd %xmm5,%xmm13,%xmm1
- vpshufb %xmm0,%xmm13,%xmm13
- vpshufb %xmm0,%xmm14,%xmm14
- vpshufb %xmm0,%xmm1,%xmm1
- jmp .Lresume_ctr32
-
-.align 32
-.Lenc_tail:
- vaesenc %xmm15,%xmm9,%xmm9
- vmovdqu %xmm7,16+8(%rsp)
- vpalignr $8,%xmm4,%xmm4,%xmm8
- vaesenc %xmm15,%xmm10,%xmm10
- vpclmulqdq $0x10,%xmm3,%xmm4,%xmm4
- vpxor 0(%rdi),%xmm1,%xmm2
- vaesenc %xmm15,%xmm11,%xmm11
- vpxor 16(%rdi),%xmm1,%xmm0
- vaesenc %xmm15,%xmm12,%xmm12
- vpxor 32(%rdi),%xmm1,%xmm5
- vaesenc %xmm15,%xmm13,%xmm13
- vpxor 48(%rdi),%xmm1,%xmm6
- vaesenc %xmm15,%xmm14,%xmm14
- vpxor 64(%rdi),%xmm1,%xmm7
- vpxor 80(%rdi),%xmm1,%xmm3
- vmovdqu (%r8),%xmm1
-
- vaesenclast %xmm2,%xmm9,%xmm9
- vmovdqu 32(%r11),%xmm2
- vaesenclast %xmm0,%xmm10,%xmm10
- vpaddb %xmm2,%xmm1,%xmm0
- movq %r13,112+8(%rsp)
- leaq 96(%rdi),%rdi
- vaesenclast %xmm5,%xmm11,%xmm11
- vpaddb %xmm2,%xmm0,%xmm5
- movq %r12,120+8(%rsp)
- leaq 96(%rsi),%rsi
- vmovdqu 0-128(%rcx),%xmm15
- vaesenclast %xmm6,%xmm12,%xmm12
- vpaddb %xmm2,%xmm5,%xmm6
- vaesenclast %xmm7,%xmm13,%xmm13
- vpaddb %xmm2,%xmm6,%xmm7
- vaesenclast %xmm3,%xmm14,%xmm14
- vpaddb %xmm2,%xmm7,%xmm3
-
- addq $0x60,%r10
- subq $0x6,%rdx
- jc .L6x_done
-
- vmovups %xmm9,-96(%rsi)
- vpxor %xmm15,%xmm1,%xmm9
- vmovups %xmm10,-80(%rsi)
- vmovdqa %xmm0,%xmm10
- vmovups %xmm11,-64(%rsi)
- vmovdqa %xmm5,%xmm11
- vmovups %xmm12,-48(%rsi)
- vmovdqa %xmm6,%xmm12
- vmovups %xmm13,-32(%rsi)
- vmovdqa %xmm7,%xmm13
- vmovups %xmm14,-16(%rsi)
- vmovdqa %xmm3,%xmm14
- vmovdqu 32+8(%rsp),%xmm7
- jmp .Loop6x
-
-.L6x_done:
- vpxor 16+8(%rsp),%xmm8,%xmm8
- vpxor %xmm4,%xmm8,%xmm8
-
+.globl aesni_gcm_encrypt
+.type aesni_gcm_encrypt,@function
+aesni_gcm_encrypt:
+.cfi_startproc
+ xorl %eax,%eax
.byte 0xf3,0xc3
-.size _aesni_ctr32_ghash_6x,.-_aesni_ctr32_ghash_6x
+.cfi_endproc
+.size aesni_gcm_encrypt,.-aesni_gcm_encrypt
+
.globl aesni_gcm_decrypt
.type aesni_gcm_decrypt,@function
-.align 32
aesni_gcm_decrypt:
.cfi_startproc
- xorq %r10,%r10
- cmpq $0x60,%rdx
- jb .Lgcm_dec_abort
-
- leaq (%rsp),%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- vzeroupper
-
- vmovdqu (%r8),%xmm1
- addq $-128,%rsp
- movl 12(%r8),%ebx
- leaq .Lbswap_mask(%rip),%r11
- leaq -128(%rcx),%r14
- movq $0xf80,%r15
- vmovdqu (%r9),%xmm8
- andq $-128,%rsp
- vmovdqu (%r11),%xmm0
- leaq 128(%rcx),%rcx
- leaq 32+32(%r9),%r9
- movl 240-128(%rcx),%ebp
- vpshufb %xmm0,%xmm8,%xmm8
-
- andq %r15,%r14
- andq %rsp,%r15
- subq %r14,%r15
- jc .Ldec_no_key_aliasing
- cmpq $768,%r15
- jnc .Ldec_no_key_aliasing
- subq %r15,%rsp
-.Ldec_no_key_aliasing:
-
- vmovdqu 80(%rdi),%xmm7
- leaq (%rdi),%r14
- vmovdqu 64(%rdi),%xmm4
- leaq -192(%rdi,%rdx,1),%r15
- vmovdqu 48(%rdi),%xmm5
- shrq $4,%rdx
- xorq %r10,%r10
- vmovdqu 32(%rdi),%xmm6
- vpshufb %xmm0,%xmm7,%xmm7
- vmovdqu 16(%rdi),%xmm2
- vpshufb %xmm0,%xmm4,%xmm4
- vmovdqu (%rdi),%xmm3
- vpshufb %xmm0,%xmm5,%xmm5
- vmovdqu %xmm4,48(%rsp)
- vpshufb %xmm0,%xmm6,%xmm6
- vmovdqu %xmm5,64(%rsp)
- vpshufb %xmm0,%xmm2,%xmm2
- vmovdqu %xmm6,80(%rsp)
- vpshufb %xmm0,%xmm3,%xmm3
- vmovdqu %xmm2,96(%rsp)
- vmovdqu %xmm3,112(%rsp)
-
- call _aesni_ctr32_ghash_6x
-
- vmovups %xmm9,-96(%rsi)
- vmovups %xmm10,-80(%rsi)
- vmovups %xmm11,-64(%rsi)
- vmovups %xmm12,-48(%rsi)
- vmovups %xmm13,-32(%rsi)
- vmovups %xmm14,-16(%rsi)
-
- vpshufb (%r11),%xmm8,%xmm8
- vmovdqu %xmm8,-64(%r9)
-
- vzeroupper
- movq -48(%rax),%r15
-.cfi_restore %r15
- movq -40(%rax),%r14
-.cfi_restore %r14
- movq -32(%rax),%r13
-.cfi_restore %r13
- movq -24(%rax),%r12
-.cfi_restore %r12
- movq -16(%rax),%rbp
-.cfi_restore %rbp
- movq -8(%rax),%rbx
-.cfi_restore %rbx
- leaq (%rax),%rsp
-.cfi_def_cfa_register %rsp
-.Lgcm_dec_abort:
- movq %r10,%rax
+ xorl %eax,%eax
.byte 0xf3,0xc3
.cfi_endproc
.size aesni_gcm_decrypt,.-aesni_gcm_decrypt
-.type _aesni_ctr32_6x,@function
-.align 32
-_aesni_ctr32_6x:
- vmovdqu 0-128(%rcx),%xmm4
- vmovdqu 32(%r11),%xmm2
- leaq -1(%rbp),%r13
- vmovups 16-128(%rcx),%xmm15
- leaq 32-128(%rcx),%r12
- vpxor %xmm4,%xmm1,%xmm9
- addl $100663296,%ebx
- jc .Lhandle_ctr32_2
- vpaddb %xmm2,%xmm1,%xmm10
- vpaddb %xmm2,%xmm10,%xmm11
- vpxor %xmm4,%xmm10,%xmm10
- vpaddb %xmm2,%xmm11,%xmm12
- vpxor %xmm4,%xmm11,%xmm11
- vpaddb %xmm2,%xmm12,%xmm13
- vpxor %xmm4,%xmm12,%xmm12
- vpaddb %xmm2,%xmm13,%xmm14
- vpxor %xmm4,%xmm13,%xmm13
- vpaddb %xmm2,%xmm14,%xmm1
- vpxor %xmm4,%xmm14,%xmm14
- jmp .Loop_ctr32
-
-.align 16
-.Loop_ctr32:
- vaesenc %xmm15,%xmm9,%xmm9
- vaesenc %xmm15,%xmm10,%xmm10
- vaesenc %xmm15,%xmm11,%xmm11
- vaesenc %xmm15,%xmm12,%xmm12
- vaesenc %xmm15,%xmm13,%xmm13
- vaesenc %xmm15,%xmm14,%xmm14
- vmovups (%r12),%xmm15
- leaq 16(%r12),%r12
- decl %r13d
- jnz .Loop_ctr32
-
- vmovdqu (%r12),%xmm3
- vaesenc %xmm15,%xmm9,%xmm9
- vpxor 0(%rdi),%xmm3,%xmm4
- vaesenc %xmm15,%xmm10,%xmm10
- vpxor 16(%rdi),%xmm3,%xmm5
- vaesenc %xmm15,%xmm11,%xmm11
- vpxor 32(%rdi),%xmm3,%xmm6
- vaesenc %xmm15,%xmm12,%xmm12
- vpxor 48(%rdi),%xmm3,%xmm8
- vaesenc %xmm15,%xmm13,%xmm13
- vpxor 64(%rdi),%xmm3,%xmm2
- vaesenc %xmm15,%xmm14,%xmm14
- vpxor 80(%rdi),%xmm3,%xmm3
- leaq 96(%rdi),%rdi
-
- vaesenclast %xmm4,%xmm9,%xmm9
- vaesenclast %xmm5,%xmm10,%xmm10
- vaesenclast %xmm6,%xmm11,%xmm11
- vaesenclast %xmm8,%xmm12,%xmm12
- vaesenclast %xmm2,%xmm13,%xmm13
- vaesenclast %xmm3,%xmm14,%xmm14
- vmovups %xmm9,0(%rsi)
- vmovups %xmm10,16(%rsi)
- vmovups %xmm11,32(%rsi)
- vmovups %xmm12,48(%rsi)
- vmovups %xmm13,64(%rsi)
- vmovups %xmm14,80(%rsi)
- leaq 96(%rsi),%rsi
-
- .byte 0xf3,0xc3
-.align 32
-.Lhandle_ctr32_2:
- vpshufb %xmm0,%xmm1,%xmm6
- vmovdqu 48(%r11),%xmm5
- vpaddd 64(%r11),%xmm6,%xmm10
- vpaddd %xmm5,%xmm6,%xmm11
- vpaddd %xmm5,%xmm10,%xmm12
- vpshufb %xmm0,%xmm10,%xmm10
- vpaddd %xmm5,%xmm11,%xmm13
- vpshufb %xmm0,%xmm11,%xmm11
- vpxor %xmm4,%xmm10,%xmm10
- vpaddd %xmm5,%xmm12,%xmm14
- vpshufb %xmm0,%xmm12,%xmm12
- vpxor %xmm4,%xmm11,%xmm11
- vpaddd %xmm5,%xmm13,%xmm1
- vpshufb %xmm0,%xmm13,%xmm13
- vpxor %xmm4,%xmm12,%xmm12
- vpshufb %xmm0,%xmm14,%xmm14
- vpxor %xmm4,%xmm13,%xmm13
- vpshufb %xmm0,%xmm1,%xmm1
- vpxor %xmm4,%xmm14,%xmm14
- jmp .Loop_ctr32
-.size _aesni_ctr32_6x,.-_aesni_ctr32_6x
-
-.globl aesni_gcm_encrypt
-.type aesni_gcm_encrypt,@function
-.align 32
-aesni_gcm_encrypt:
-.cfi_startproc
- xorq %r10,%r10
- cmpq $288,%rdx
- jb .Lgcm_enc_abort
-
- leaq (%rsp),%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- vzeroupper
-
- vmovdqu (%r8),%xmm1
- addq $-128,%rsp
- movl 12(%r8),%ebx
- leaq .Lbswap_mask(%rip),%r11
- leaq -128(%rcx),%r14
- movq $0xf80,%r15
- leaq 128(%rcx),%rcx
- vmovdqu (%r11),%xmm0
- andq $-128,%rsp
- movl 240-128(%rcx),%ebp
-
- andq %r15,%r14
- andq %rsp,%r15
- subq %r14,%r15
- jc .Lenc_no_key_aliasing
- cmpq $768,%r15
- jnc .Lenc_no_key_aliasing
- subq %r15,%rsp
-.Lenc_no_key_aliasing:
-
- leaq (%rsi),%r14
- leaq -192(%rsi,%rdx,1),%r15
- shrq $4,%rdx
-
- call _aesni_ctr32_6x
- vpshufb %xmm0,%xmm9,%xmm8
- vpshufb %xmm0,%xmm10,%xmm2
- vmovdqu %xmm8,112(%rsp)
- vpshufb %xmm0,%xmm11,%xmm4
- vmovdqu %xmm2,96(%rsp)
- vpshufb %xmm0,%xmm12,%xmm5
- vmovdqu %xmm4,80(%rsp)
- vpshufb %xmm0,%xmm13,%xmm6
- vmovdqu %xmm5,64(%rsp)
- vpshufb %xmm0,%xmm14,%xmm7
- vmovdqu %xmm6,48(%rsp)
-
- call _aesni_ctr32_6x
-
- vmovdqu (%r9),%xmm8
- leaq 32+32(%r9),%r9
- subq $12,%rdx
- movq $192,%r10
- vpshufb %xmm0,%xmm8,%xmm8
-
- call _aesni_ctr32_ghash_6x
- vmovdqu 32(%rsp),%xmm7
- vmovdqu (%r11),%xmm0
- vmovdqu 0-32(%r9),%xmm3
- vpunpckhqdq %xmm7,%xmm7,%xmm1
- vmovdqu 32-32(%r9),%xmm15
- vmovups %xmm9,-96(%rsi)
- vpshufb %xmm0,%xmm9,%xmm9
- vpxor %xmm7,%xmm1,%xmm1
- vmovups %xmm10,-80(%rsi)
- vpshufb %xmm0,%xmm10,%xmm10
- vmovups %xmm11,-64(%rsi)
- vpshufb %xmm0,%xmm11,%xmm11
- vmovups %xmm12,-48(%rsi)
- vpshufb %xmm0,%xmm12,%xmm12
- vmovups %xmm13,-32(%rsi)
- vpshufb %xmm0,%xmm13,%xmm13
- vmovups %xmm14,-16(%rsi)
- vpshufb %xmm0,%xmm14,%xmm14
- vmovdqu %xmm9,16(%rsp)
- vmovdqu 48(%rsp),%xmm6
- vmovdqu 16-32(%r9),%xmm0
- vpunpckhqdq %xmm6,%xmm6,%xmm2
- vpclmulqdq $0x00,%xmm3,%xmm7,%xmm5
- vpxor %xmm6,%xmm2,%xmm2
- vpclmulqdq $0x11,%xmm3,%xmm7,%xmm7
- vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1
-
- vmovdqu 64(%rsp),%xmm9
- vpclmulqdq $0x00,%xmm0,%xmm6,%xmm4
- vmovdqu 48-32(%r9),%xmm3
- vpxor %xmm5,%xmm4,%xmm4
- vpunpckhqdq %xmm9,%xmm9,%xmm5
- vpclmulqdq $0x11,%xmm0,%xmm6,%xmm6
- vpxor %xmm9,%xmm5,%xmm5
- vpxor %xmm7,%xmm6,%xmm6
- vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2
- vmovdqu 80-32(%r9),%xmm15
- vpxor %xmm1,%xmm2,%xmm2
-
- vmovdqu 80(%rsp),%xmm1
- vpclmulqdq $0x00,%xmm3,%xmm9,%xmm7
- vmovdqu 64-32(%r9),%xmm0
- vpxor %xmm4,%xmm7,%xmm7
- vpunpckhqdq %xmm1,%xmm1,%xmm4
- vpclmulqdq $0x11,%xmm3,%xmm9,%xmm9
- vpxor %xmm1,%xmm4,%xmm4
- vpxor %xmm6,%xmm9,%xmm9
- vpclmulqdq $0x00,%xmm15,%xmm5,%xmm5
- vpxor %xmm2,%xmm5,%xmm5
-
- vmovdqu 96(%rsp),%xmm2
- vpclmulqdq $0x00,%xmm0,%xmm1,%xmm6
- vmovdqu 96-32(%r9),%xmm3
- vpxor %xmm7,%xmm6,%xmm6
- vpunpckhqdq %xmm2,%xmm2,%xmm7
- vpclmulqdq $0x11,%xmm0,%xmm1,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpxor %xmm9,%xmm1,%xmm1
- vpclmulqdq $0x10,%xmm15,%xmm4,%xmm4
- vmovdqu 128-32(%r9),%xmm15
- vpxor %xmm5,%xmm4,%xmm4
-
- vpxor 112(%rsp),%xmm8,%xmm8
- vpclmulqdq $0x00,%xmm3,%xmm2,%xmm5
- vmovdqu 112-32(%r9),%xmm0
- vpunpckhqdq %xmm8,%xmm8,%xmm9
- vpxor %xmm6,%xmm5,%xmm5
- vpclmulqdq $0x11,%xmm3,%xmm2,%xmm2
- vpxor %xmm8,%xmm9,%xmm9
- vpxor %xmm1,%xmm2,%xmm2
- vpclmulqdq $0x00,%xmm15,%xmm7,%xmm7
- vpxor %xmm4,%xmm7,%xmm4
-
- vpclmulqdq $0x00,%xmm0,%xmm8,%xmm6
- vmovdqu 0-32(%r9),%xmm3
- vpunpckhqdq %xmm14,%xmm14,%xmm1
- vpclmulqdq $0x11,%xmm0,%xmm8,%xmm8
- vpxor %xmm14,%xmm1,%xmm1
- vpxor %xmm5,%xmm6,%xmm5
- vpclmulqdq $0x10,%xmm15,%xmm9,%xmm9
- vmovdqu 32-32(%r9),%xmm15
- vpxor %xmm2,%xmm8,%xmm7
- vpxor %xmm4,%xmm9,%xmm6
-
- vmovdqu 16-32(%r9),%xmm0
- vpxor %xmm5,%xmm7,%xmm9
- vpclmulqdq $0x00,%xmm3,%xmm14,%xmm4
- vpxor %xmm9,%xmm6,%xmm6
- vpunpckhqdq %xmm13,%xmm13,%xmm2
- vpclmulqdq $0x11,%xmm3,%xmm14,%xmm14
- vpxor %xmm13,%xmm2,%xmm2
- vpslldq $8,%xmm6,%xmm9
- vpclmulqdq $0x00,%xmm15,%xmm1,%xmm1
- vpxor %xmm9,%xmm5,%xmm8
- vpsrldq $8,%xmm6,%xmm6
- vpxor %xmm6,%xmm7,%xmm7
-
- vpclmulqdq $0x00,%xmm0,%xmm13,%xmm5
- vmovdqu 48-32(%r9),%xmm3
- vpxor %xmm4,%xmm5,%xmm5
- vpunpckhqdq %xmm12,%xmm12,%xmm9
- vpclmulqdq $0x11,%xmm0,%xmm13,%xmm13
- vpxor %xmm12,%xmm9,%xmm9
- vpxor %xmm14,%xmm13,%xmm13
- vpalignr $8,%xmm8,%xmm8,%xmm14
- vpclmulqdq $0x10,%xmm15,%xmm2,%xmm2
- vmovdqu 80-32(%r9),%xmm15
- vpxor %xmm1,%xmm2,%xmm2
-
- vpclmulqdq $0x00,%xmm3,%xmm12,%xmm4
- vmovdqu 64-32(%r9),%xmm0
- vpxor %xmm5,%xmm4,%xmm4
- vpunpckhqdq %xmm11,%xmm11,%xmm1
- vpclmulqdq $0x11,%xmm3,%xmm12,%xmm12
- vpxor %xmm11,%xmm1,%xmm1
- vpxor %xmm13,%xmm12,%xmm12
- vxorps 16(%rsp),%xmm7,%xmm7
- vpclmulqdq $0x00,%xmm15,%xmm9,%xmm9
- vpxor %xmm2,%xmm9,%xmm9
-
- vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8
- vxorps %xmm14,%xmm8,%xmm8
-
- vpclmulqdq $0x00,%xmm0,%xmm11,%xmm5
- vmovdqu 96-32(%r9),%xmm3
- vpxor %xmm4,%xmm5,%xmm5
- vpunpckhqdq %xmm10,%xmm10,%xmm2
- vpclmulqdq $0x11,%xmm0,%xmm11,%xmm11
- vpxor %xmm10,%xmm2,%xmm2
- vpalignr $8,%xmm8,%xmm8,%xmm14
- vpxor %xmm12,%xmm11,%xmm11
- vpclmulqdq $0x10,%xmm15,%xmm1,%xmm1
- vmovdqu 128-32(%r9),%xmm15
- vpxor %xmm9,%xmm1,%xmm1
-
- vxorps %xmm7,%xmm14,%xmm14
- vpclmulqdq $0x10,16(%r11),%xmm8,%xmm8
- vxorps %xmm14,%xmm8,%xmm8
-
- vpclmulqdq $0x00,%xmm3,%xmm10,%xmm4
- vmovdqu 112-32(%r9),%xmm0
- vpxor %xmm5,%xmm4,%xmm4
- vpunpckhqdq %xmm8,%xmm8,%xmm9
- vpclmulqdq $0x11,%xmm3,%xmm10,%xmm10
- vpxor %xmm8,%xmm9,%xmm9
- vpxor %xmm11,%xmm10,%xmm10
- vpclmulqdq $0x00,%xmm15,%xmm2,%xmm2
- vpxor %xmm1,%xmm2,%xmm2
-
- vpclmulqdq $0x00,%xmm0,%xmm8,%xmm5
- vpclmulqdq $0x11,%xmm0,%xmm8,%xmm7
- vpxor %xmm4,%xmm5,%xmm5
- vpclmulqdq $0x10,%xmm15,%xmm9,%xmm6
- vpxor %xmm10,%xmm7,%xmm7
- vpxor %xmm2,%xmm6,%xmm6
-
- vpxor %xmm5,%xmm7,%xmm4
- vpxor %xmm4,%xmm6,%xmm6
- vpslldq $8,%xmm6,%xmm1
- vmovdqu 16(%r11),%xmm3
- vpsrldq $8,%xmm6,%xmm6
- vpxor %xmm1,%xmm5,%xmm8
- vpxor %xmm6,%xmm7,%xmm7
-
- vpalignr $8,%xmm8,%xmm8,%xmm2
- vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8
- vpxor %xmm2,%xmm8,%xmm8
-
- vpalignr $8,%xmm8,%xmm8,%xmm2
- vpclmulqdq $0x10,%xmm3,%xmm8,%xmm8
- vpxor %xmm7,%xmm2,%xmm2
- vpxor %xmm2,%xmm8,%xmm8
- vpshufb (%r11),%xmm8,%xmm8
- vmovdqu %xmm8,-64(%r9)
-
- vzeroupper
- movq -48(%rax),%r15
-.cfi_restore %r15
- movq -40(%rax),%r14
-.cfi_restore %r14
- movq -32(%rax),%r13
-.cfi_restore %r13
- movq -24(%rax),%r12
-.cfi_restore %r12
- movq -16(%rax),%rbp
-.cfi_restore %rbp
- movq -8(%rax),%rbx
-.cfi_restore %rbx
- leaq (%rax),%rsp
-.cfi_def_cfa_register %rsp
-.Lgcm_enc_abort:
- movq %r10,%rax
- .byte 0xf3,0xc3
-.cfi_endproc
-.size aesni_gcm_encrypt,.-aesni_gcm_encrypt
-.align 64
-.Lbswap_mask:
-.byte 15,14,13,12,11,10,9,8,7,6,5,4,3,2,1,0
-.Lpoly:
-.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0xc2
-.Lone_msb:
-.byte 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1
-.Ltwo_lsb:
-.byte 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-.Lone_lsb:
-.byte 1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0
-.byte 65,69,83,45,78,73,32,71,67,77,32,109,111,100,117,108,101,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
-.align 64
diff --git a/secure/lib/libcrypto/amd64/aesni-mb-x86_64.S b/secure/lib/libcrypto/amd64/aesni-mb-x86_64.S
index 706c5c59d38d0..de4bac9488f7f 100644
--- a/secure/lib/libcrypto/amd64/aesni-mb-x86_64.S
+++ b/secure/lib/libcrypto/amd64/aesni-mb-x86_64.S
@@ -9,14 +9,6 @@
.align 32
aesni_multi_cbc_encrypt:
.cfi_startproc
- cmpl $2,%edx
- jb .Lenc_non_avx
- movl OPENSSL_ia32cap_P+4(%rip),%ecx
- testl $268435456,%ecx
- jnz _avx_cbc_enc_shortcut
- jmp .Lenc_non_avx
-.align 16
-.Lenc_non_avx:
movq %rsp,%rax
.cfi_def_cfa_register %rax
pushq %rbx
@@ -291,14 +283,6 @@ aesni_multi_cbc_encrypt:
.align 32
aesni_multi_cbc_decrypt:
.cfi_startproc
- cmpl $2,%edx
- jb .Ldec_non_avx
- movl OPENSSL_ia32cap_P+4(%rip),%ecx
- testl $268435456,%ecx
- jnz _avx_cbc_dec_shortcut
- jmp .Ldec_non_avx
-.align 16
-.Ldec_non_avx:
movq %rsp,%rax
.cfi_def_cfa_register %rax
pushq %rbx
@@ -558,952 +542,3 @@ aesni_multi_cbc_decrypt:
.byte 0xf3,0xc3
.cfi_endproc
.size aesni_multi_cbc_decrypt,.-aesni_multi_cbc_decrypt
-.type aesni_multi_cbc_encrypt_avx,@function
-.align 32
-aesni_multi_cbc_encrypt_avx:
-.cfi_startproc
-_avx_cbc_enc_shortcut:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
-
-
-
-
-
-
-
-
- subq $192,%rsp
- andq $-128,%rsp
- movq %rax,16(%rsp)
-.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x08
-
-.Lenc8x_body:
- vzeroupper
- vmovdqu (%rsi),%xmm15
- leaq 120(%rsi),%rsi
- leaq 160(%rdi),%rdi
- shrl $1,%edx
-
-.Lenc8x_loop_grande:
-
- xorl %edx,%edx
- movl -144(%rdi),%ecx
- movq -160(%rdi),%r8
- cmpl %edx,%ecx
- movq -152(%rdi),%rbx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -136(%rdi),%xmm2
- movl %ecx,32(%rsp)
- cmovleq %rsp,%r8
- subq %r8,%rbx
- movq %rbx,64(%rsp)
- movl -104(%rdi),%ecx
- movq -120(%rdi),%r9
- cmpl %edx,%ecx
- movq -112(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -96(%rdi),%xmm3
- movl %ecx,36(%rsp)
- cmovleq %rsp,%r9
- subq %r9,%rbp
- movq %rbp,72(%rsp)
- movl -64(%rdi),%ecx
- movq -80(%rdi),%r10
- cmpl %edx,%ecx
- movq -72(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -56(%rdi),%xmm4
- movl %ecx,40(%rsp)
- cmovleq %rsp,%r10
- subq %r10,%rbp
- movq %rbp,80(%rsp)
- movl -24(%rdi),%ecx
- movq -40(%rdi),%r11
- cmpl %edx,%ecx
- movq -32(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -16(%rdi),%xmm5
- movl %ecx,44(%rsp)
- cmovleq %rsp,%r11
- subq %r11,%rbp
- movq %rbp,88(%rsp)
- movl 16(%rdi),%ecx
- movq 0(%rdi),%r12
- cmpl %edx,%ecx
- movq 8(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 24(%rdi),%xmm6
- movl %ecx,48(%rsp)
- cmovleq %rsp,%r12
- subq %r12,%rbp
- movq %rbp,96(%rsp)
- movl 56(%rdi),%ecx
- movq 40(%rdi),%r13
- cmpl %edx,%ecx
- movq 48(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 64(%rdi),%xmm7
- movl %ecx,52(%rsp)
- cmovleq %rsp,%r13
- subq %r13,%rbp
- movq %rbp,104(%rsp)
- movl 96(%rdi),%ecx
- movq 80(%rdi),%r14
- cmpl %edx,%ecx
- movq 88(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 104(%rdi),%xmm8
- movl %ecx,56(%rsp)
- cmovleq %rsp,%r14
- subq %r14,%rbp
- movq %rbp,112(%rsp)
- movl 136(%rdi),%ecx
- movq 120(%rdi),%r15
- cmpl %edx,%ecx
- movq 128(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 144(%rdi),%xmm9
- movl %ecx,60(%rsp)
- cmovleq %rsp,%r15
- subq %r15,%rbp
- movq %rbp,120(%rsp)
- testl %edx,%edx
- jz .Lenc8x_done
-
- vmovups 16-120(%rsi),%xmm1
- vmovups 32-120(%rsi),%xmm0
- movl 240-120(%rsi),%eax
-
- vpxor (%r8),%xmm15,%xmm10
- leaq 128(%rsp),%rbp
- vpxor (%r9),%xmm15,%xmm11
- vpxor (%r10),%xmm15,%xmm12
- vpxor (%r11),%xmm15,%xmm13
- vpxor %xmm10,%xmm2,%xmm2
- vpxor (%r12),%xmm15,%xmm10
- vpxor %xmm11,%xmm3,%xmm3
- vpxor (%r13),%xmm15,%xmm11
- vpxor %xmm12,%xmm4,%xmm4
- vpxor (%r14),%xmm15,%xmm12
- vpxor %xmm13,%xmm5,%xmm5
- vpxor (%r15),%xmm15,%xmm13
- vpxor %xmm10,%xmm6,%xmm6
- movl $1,%ecx
- vpxor %xmm11,%xmm7,%xmm7
- vpxor %xmm12,%xmm8,%xmm8
- vpxor %xmm13,%xmm9,%xmm9
- jmp .Loop_enc8x
-
-.align 32
-.Loop_enc8x:
- vaesenc %xmm1,%xmm2,%xmm2
- cmpl 32+0(%rsp),%ecx
- vaesenc %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r8)
- vaesenc %xmm1,%xmm4,%xmm4
- vaesenc %xmm1,%xmm5,%xmm5
- leaq (%r8,%rbx,1),%rbx
- cmovgeq %rsp,%r8
- vaesenc %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm1,%xmm7,%xmm7
- subq %r8,%rbx
- vaesenc %xmm1,%xmm8,%xmm8
- vpxor 16(%r8),%xmm15,%xmm10
- movq %rbx,64+0(%rsp)
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups -72(%rsi),%xmm1
- leaq 16(%r8,%rbx,1),%r8
- vmovdqu %xmm10,0(%rbp)
- vaesenc %xmm0,%xmm2,%xmm2
- cmpl 32+4(%rsp),%ecx
- movq 64+8(%rsp),%rbx
- vaesenc %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r9)
- vaesenc %xmm0,%xmm4,%xmm4
- vaesenc %xmm0,%xmm5,%xmm5
- leaq (%r9,%rbx,1),%rbx
- cmovgeq %rsp,%r9
- vaesenc %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm0,%xmm7,%xmm7
- subq %r9,%rbx
- vaesenc %xmm0,%xmm8,%xmm8
- vpxor 16(%r9),%xmm15,%xmm11
- movq %rbx,64+8(%rsp)
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups -56(%rsi),%xmm0
- leaq 16(%r9,%rbx,1),%r9
- vmovdqu %xmm11,16(%rbp)
- vaesenc %xmm1,%xmm2,%xmm2
- cmpl 32+8(%rsp),%ecx
- movq 64+16(%rsp),%rbx
- vaesenc %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r10)
- vaesenc %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r8)
- vaesenc %xmm1,%xmm5,%xmm5
- leaq (%r10,%rbx,1),%rbx
- cmovgeq %rsp,%r10
- vaesenc %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm1,%xmm7,%xmm7
- subq %r10,%rbx
- vaesenc %xmm1,%xmm8,%xmm8
- vpxor 16(%r10),%xmm15,%xmm12
- movq %rbx,64+16(%rsp)
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups -40(%rsi),%xmm1
- leaq 16(%r10,%rbx,1),%r10
- vmovdqu %xmm12,32(%rbp)
- vaesenc %xmm0,%xmm2,%xmm2
- cmpl 32+12(%rsp),%ecx
- movq 64+24(%rsp),%rbx
- vaesenc %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r11)
- vaesenc %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r9)
- vaesenc %xmm0,%xmm5,%xmm5
- leaq (%r11,%rbx,1),%rbx
- cmovgeq %rsp,%r11
- vaesenc %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm0,%xmm7,%xmm7
- subq %r11,%rbx
- vaesenc %xmm0,%xmm8,%xmm8
- vpxor 16(%r11),%xmm15,%xmm13
- movq %rbx,64+24(%rsp)
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups -24(%rsi),%xmm0
- leaq 16(%r11,%rbx,1),%r11
- vmovdqu %xmm13,48(%rbp)
- vaesenc %xmm1,%xmm2,%xmm2
- cmpl 32+16(%rsp),%ecx
- movq 64+32(%rsp),%rbx
- vaesenc %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r12)
- vaesenc %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r10)
- vaesenc %xmm1,%xmm5,%xmm5
- leaq (%r12,%rbx,1),%rbx
- cmovgeq %rsp,%r12
- vaesenc %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm1,%xmm7,%xmm7
- subq %r12,%rbx
- vaesenc %xmm1,%xmm8,%xmm8
- vpxor 16(%r12),%xmm15,%xmm10
- movq %rbx,64+32(%rsp)
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups -8(%rsi),%xmm1
- leaq 16(%r12,%rbx,1),%r12
- vaesenc %xmm0,%xmm2,%xmm2
- cmpl 32+20(%rsp),%ecx
- movq 64+40(%rsp),%rbx
- vaesenc %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r13)
- vaesenc %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r11)
- vaesenc %xmm0,%xmm5,%xmm5
- leaq (%rbx,%r13,1),%rbx
- cmovgeq %rsp,%r13
- vaesenc %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm0,%xmm7,%xmm7
- subq %r13,%rbx
- vaesenc %xmm0,%xmm8,%xmm8
- vpxor 16(%r13),%xmm15,%xmm11
- movq %rbx,64+40(%rsp)
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups 8(%rsi),%xmm0
- leaq 16(%r13,%rbx,1),%r13
- vaesenc %xmm1,%xmm2,%xmm2
- cmpl 32+24(%rsp),%ecx
- movq 64+48(%rsp),%rbx
- vaesenc %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r14)
- vaesenc %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r12)
- vaesenc %xmm1,%xmm5,%xmm5
- leaq (%r14,%rbx,1),%rbx
- cmovgeq %rsp,%r14
- vaesenc %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm1,%xmm7,%xmm7
- subq %r14,%rbx
- vaesenc %xmm1,%xmm8,%xmm8
- vpxor 16(%r14),%xmm15,%xmm12
- movq %rbx,64+48(%rsp)
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups 24(%rsi),%xmm1
- leaq 16(%r14,%rbx,1),%r14
- vaesenc %xmm0,%xmm2,%xmm2
- cmpl 32+28(%rsp),%ecx
- movq 64+56(%rsp),%rbx
- vaesenc %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r15)
- vaesenc %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r13)
- vaesenc %xmm0,%xmm5,%xmm5
- leaq (%r15,%rbx,1),%rbx
- cmovgeq %rsp,%r15
- vaesenc %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesenc %xmm0,%xmm7,%xmm7
- subq %r15,%rbx
- vaesenc %xmm0,%xmm8,%xmm8
- vpxor 16(%r15),%xmm15,%xmm13
- movq %rbx,64+56(%rsp)
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups 40(%rsi),%xmm0
- leaq 16(%r15,%rbx,1),%r15
- vmovdqu 32(%rsp),%xmm14
- prefetcht0 15(%r14)
- prefetcht0 15(%r15)
- cmpl $11,%eax
- jb .Lenc8x_tail
-
- vaesenc %xmm1,%xmm2,%xmm2
- vaesenc %xmm1,%xmm3,%xmm3
- vaesenc %xmm1,%xmm4,%xmm4
- vaesenc %xmm1,%xmm5,%xmm5
- vaesenc %xmm1,%xmm6,%xmm6
- vaesenc %xmm1,%xmm7,%xmm7
- vaesenc %xmm1,%xmm8,%xmm8
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups 176-120(%rsi),%xmm1
-
- vaesenc %xmm0,%xmm2,%xmm2
- vaesenc %xmm0,%xmm3,%xmm3
- vaesenc %xmm0,%xmm4,%xmm4
- vaesenc %xmm0,%xmm5,%xmm5
- vaesenc %xmm0,%xmm6,%xmm6
- vaesenc %xmm0,%xmm7,%xmm7
- vaesenc %xmm0,%xmm8,%xmm8
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups 192-120(%rsi),%xmm0
- je .Lenc8x_tail
-
- vaesenc %xmm1,%xmm2,%xmm2
- vaesenc %xmm1,%xmm3,%xmm3
- vaesenc %xmm1,%xmm4,%xmm4
- vaesenc %xmm1,%xmm5,%xmm5
- vaesenc %xmm1,%xmm6,%xmm6
- vaesenc %xmm1,%xmm7,%xmm7
- vaesenc %xmm1,%xmm8,%xmm8
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups 208-120(%rsi),%xmm1
-
- vaesenc %xmm0,%xmm2,%xmm2
- vaesenc %xmm0,%xmm3,%xmm3
- vaesenc %xmm0,%xmm4,%xmm4
- vaesenc %xmm0,%xmm5,%xmm5
- vaesenc %xmm0,%xmm6,%xmm6
- vaesenc %xmm0,%xmm7,%xmm7
- vaesenc %xmm0,%xmm8,%xmm8
- vaesenc %xmm0,%xmm9,%xmm9
- vmovups 224-120(%rsi),%xmm0
-
-.Lenc8x_tail:
- vaesenc %xmm1,%xmm2,%xmm2
- vpxor %xmm15,%xmm15,%xmm15
- vaesenc %xmm1,%xmm3,%xmm3
- vaesenc %xmm1,%xmm4,%xmm4
- vpcmpgtd %xmm15,%xmm14,%xmm15
- vaesenc %xmm1,%xmm5,%xmm5
- vaesenc %xmm1,%xmm6,%xmm6
- vpaddd %xmm14,%xmm15,%xmm15
- vmovdqu 48(%rsp),%xmm14
- vaesenc %xmm1,%xmm7,%xmm7
- movq 64(%rsp),%rbx
- vaesenc %xmm1,%xmm8,%xmm8
- vaesenc %xmm1,%xmm9,%xmm9
- vmovups 16-120(%rsi),%xmm1
-
- vaesenclast %xmm0,%xmm2,%xmm2
- vmovdqa %xmm15,32(%rsp)
- vpxor %xmm15,%xmm15,%xmm15
- vaesenclast %xmm0,%xmm3,%xmm3
- vaesenclast %xmm0,%xmm4,%xmm4
- vpcmpgtd %xmm15,%xmm14,%xmm15
- vaesenclast %xmm0,%xmm5,%xmm5
- vaesenclast %xmm0,%xmm6,%xmm6
- vpaddd %xmm15,%xmm14,%xmm14
- vmovdqu -120(%rsi),%xmm15
- vaesenclast %xmm0,%xmm7,%xmm7
- vaesenclast %xmm0,%xmm8,%xmm8
- vmovdqa %xmm14,48(%rsp)
- vaesenclast %xmm0,%xmm9,%xmm9
- vmovups 32-120(%rsi),%xmm0
-
- vmovups %xmm2,-16(%r8)
- subq %rbx,%r8
- vpxor 0(%rbp),%xmm2,%xmm2
- vmovups %xmm3,-16(%r9)
- subq 72(%rsp),%r9
- vpxor 16(%rbp),%xmm3,%xmm3
- vmovups %xmm4,-16(%r10)
- subq 80(%rsp),%r10
- vpxor 32(%rbp),%xmm4,%xmm4
- vmovups %xmm5,-16(%r11)
- subq 88(%rsp),%r11
- vpxor 48(%rbp),%xmm5,%xmm5
- vmovups %xmm6,-16(%r12)
- subq 96(%rsp),%r12
- vpxor %xmm10,%xmm6,%xmm6
- vmovups %xmm7,-16(%r13)
- subq 104(%rsp),%r13
- vpxor %xmm11,%xmm7,%xmm7
- vmovups %xmm8,-16(%r14)
- subq 112(%rsp),%r14
- vpxor %xmm12,%xmm8,%xmm8
- vmovups %xmm9,-16(%r15)
- subq 120(%rsp),%r15
- vpxor %xmm13,%xmm9,%xmm9
-
- decl %edx
- jnz .Loop_enc8x
-
- movq 16(%rsp),%rax
-.cfi_def_cfa %rax,8
-
-
-
-
-
-.Lenc8x_done:
- vzeroupper
- movq -48(%rax),%r15
-.cfi_restore %r15
- movq -40(%rax),%r14
-.cfi_restore %r14
- movq -32(%rax),%r13
-.cfi_restore %r13
- movq -24(%rax),%r12
-.cfi_restore %r12
- movq -16(%rax),%rbp
-.cfi_restore %rbp
- movq -8(%rax),%rbx
-.cfi_restore %rbx
- leaq (%rax),%rsp
-.cfi_def_cfa_register %rsp
-.Lenc8x_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size aesni_multi_cbc_encrypt_avx,.-aesni_multi_cbc_encrypt_avx
-
-.type aesni_multi_cbc_decrypt_avx,@function
-.align 32
-aesni_multi_cbc_decrypt_avx:
-.cfi_startproc
-_avx_cbc_dec_shortcut:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
-
-
-
-
-
-
-
-
-
- subq $256,%rsp
- andq $-256,%rsp
- subq $192,%rsp
- movq %rax,16(%rsp)
-.cfi_escape 0x0f,0x05,0x77,0x10,0x06,0x23,0x08
-
-.Ldec8x_body:
- vzeroupper
- vmovdqu (%rsi),%xmm15
- leaq 120(%rsi),%rsi
- leaq 160(%rdi),%rdi
- shrl $1,%edx
-
-.Ldec8x_loop_grande:
-
- xorl %edx,%edx
- movl -144(%rdi),%ecx
- movq -160(%rdi),%r8
- cmpl %edx,%ecx
- movq -152(%rdi),%rbx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -136(%rdi),%xmm2
- movl %ecx,32(%rsp)
- cmovleq %rsp,%r8
- subq %r8,%rbx
- movq %rbx,64(%rsp)
- vmovdqu %xmm2,192(%rsp)
- movl -104(%rdi),%ecx
- movq -120(%rdi),%r9
- cmpl %edx,%ecx
- movq -112(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -96(%rdi),%xmm3
- movl %ecx,36(%rsp)
- cmovleq %rsp,%r9
- subq %r9,%rbp
- movq %rbp,72(%rsp)
- vmovdqu %xmm3,208(%rsp)
- movl -64(%rdi),%ecx
- movq -80(%rdi),%r10
- cmpl %edx,%ecx
- movq -72(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -56(%rdi),%xmm4
- movl %ecx,40(%rsp)
- cmovleq %rsp,%r10
- subq %r10,%rbp
- movq %rbp,80(%rsp)
- vmovdqu %xmm4,224(%rsp)
- movl -24(%rdi),%ecx
- movq -40(%rdi),%r11
- cmpl %edx,%ecx
- movq -32(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu -16(%rdi),%xmm5
- movl %ecx,44(%rsp)
- cmovleq %rsp,%r11
- subq %r11,%rbp
- movq %rbp,88(%rsp)
- vmovdqu %xmm5,240(%rsp)
- movl 16(%rdi),%ecx
- movq 0(%rdi),%r12
- cmpl %edx,%ecx
- movq 8(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 24(%rdi),%xmm6
- movl %ecx,48(%rsp)
- cmovleq %rsp,%r12
- subq %r12,%rbp
- movq %rbp,96(%rsp)
- vmovdqu %xmm6,256(%rsp)
- movl 56(%rdi),%ecx
- movq 40(%rdi),%r13
- cmpl %edx,%ecx
- movq 48(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 64(%rdi),%xmm7
- movl %ecx,52(%rsp)
- cmovleq %rsp,%r13
- subq %r13,%rbp
- movq %rbp,104(%rsp)
- vmovdqu %xmm7,272(%rsp)
- movl 96(%rdi),%ecx
- movq 80(%rdi),%r14
- cmpl %edx,%ecx
- movq 88(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 104(%rdi),%xmm8
- movl %ecx,56(%rsp)
- cmovleq %rsp,%r14
- subq %r14,%rbp
- movq %rbp,112(%rsp)
- vmovdqu %xmm8,288(%rsp)
- movl 136(%rdi),%ecx
- movq 120(%rdi),%r15
- cmpl %edx,%ecx
- movq 128(%rdi),%rbp
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- vmovdqu 144(%rdi),%xmm9
- movl %ecx,60(%rsp)
- cmovleq %rsp,%r15
- subq %r15,%rbp
- movq %rbp,120(%rsp)
- vmovdqu %xmm9,304(%rsp)
- testl %edx,%edx
- jz .Ldec8x_done
-
- vmovups 16-120(%rsi),%xmm1
- vmovups 32-120(%rsi),%xmm0
- movl 240-120(%rsi),%eax
- leaq 192+128(%rsp),%rbp
-
- vmovdqu (%r8),%xmm2
- vmovdqu (%r9),%xmm3
- vmovdqu (%r10),%xmm4
- vmovdqu (%r11),%xmm5
- vmovdqu (%r12),%xmm6
- vmovdqu (%r13),%xmm7
- vmovdqu (%r14),%xmm8
- vmovdqu (%r15),%xmm9
- vmovdqu %xmm2,0(%rbp)
- vpxor %xmm15,%xmm2,%xmm2
- vmovdqu %xmm3,16(%rbp)
- vpxor %xmm15,%xmm3,%xmm3
- vmovdqu %xmm4,32(%rbp)
- vpxor %xmm15,%xmm4,%xmm4
- vmovdqu %xmm5,48(%rbp)
- vpxor %xmm15,%xmm5,%xmm5
- vmovdqu %xmm6,64(%rbp)
- vpxor %xmm15,%xmm6,%xmm6
- vmovdqu %xmm7,80(%rbp)
- vpxor %xmm15,%xmm7,%xmm7
- vmovdqu %xmm8,96(%rbp)
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu %xmm9,112(%rbp)
- vpxor %xmm15,%xmm9,%xmm9
- xorq $0x80,%rbp
- movl $1,%ecx
- jmp .Loop_dec8x
-
-.align 32
-.Loop_dec8x:
- vaesdec %xmm1,%xmm2,%xmm2
- cmpl 32+0(%rsp),%ecx
- vaesdec %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r8)
- vaesdec %xmm1,%xmm4,%xmm4
- vaesdec %xmm1,%xmm5,%xmm5
- leaq (%r8,%rbx,1),%rbx
- cmovgeq %rsp,%r8
- vaesdec %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm1,%xmm7,%xmm7
- subq %r8,%rbx
- vaesdec %xmm1,%xmm8,%xmm8
- vmovdqu 16(%r8),%xmm10
- movq %rbx,64+0(%rsp)
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups -72(%rsi),%xmm1
- leaq 16(%r8,%rbx,1),%r8
- vmovdqu %xmm10,128(%rsp)
- vaesdec %xmm0,%xmm2,%xmm2
- cmpl 32+4(%rsp),%ecx
- movq 64+8(%rsp),%rbx
- vaesdec %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r9)
- vaesdec %xmm0,%xmm4,%xmm4
- vaesdec %xmm0,%xmm5,%xmm5
- leaq (%r9,%rbx,1),%rbx
- cmovgeq %rsp,%r9
- vaesdec %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm0,%xmm7,%xmm7
- subq %r9,%rbx
- vaesdec %xmm0,%xmm8,%xmm8
- vmovdqu 16(%r9),%xmm11
- movq %rbx,64+8(%rsp)
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups -56(%rsi),%xmm0
- leaq 16(%r9,%rbx,1),%r9
- vmovdqu %xmm11,144(%rsp)
- vaesdec %xmm1,%xmm2,%xmm2
- cmpl 32+8(%rsp),%ecx
- movq 64+16(%rsp),%rbx
- vaesdec %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r10)
- vaesdec %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r8)
- vaesdec %xmm1,%xmm5,%xmm5
- leaq (%r10,%rbx,1),%rbx
- cmovgeq %rsp,%r10
- vaesdec %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm1,%xmm7,%xmm7
- subq %r10,%rbx
- vaesdec %xmm1,%xmm8,%xmm8
- vmovdqu 16(%r10),%xmm12
- movq %rbx,64+16(%rsp)
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups -40(%rsi),%xmm1
- leaq 16(%r10,%rbx,1),%r10
- vmovdqu %xmm12,160(%rsp)
- vaesdec %xmm0,%xmm2,%xmm2
- cmpl 32+12(%rsp),%ecx
- movq 64+24(%rsp),%rbx
- vaesdec %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r11)
- vaesdec %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r9)
- vaesdec %xmm0,%xmm5,%xmm5
- leaq (%r11,%rbx,1),%rbx
- cmovgeq %rsp,%r11
- vaesdec %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm0,%xmm7,%xmm7
- subq %r11,%rbx
- vaesdec %xmm0,%xmm8,%xmm8
- vmovdqu 16(%r11),%xmm13
- movq %rbx,64+24(%rsp)
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups -24(%rsi),%xmm0
- leaq 16(%r11,%rbx,1),%r11
- vmovdqu %xmm13,176(%rsp)
- vaesdec %xmm1,%xmm2,%xmm2
- cmpl 32+16(%rsp),%ecx
- movq 64+32(%rsp),%rbx
- vaesdec %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r12)
- vaesdec %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r10)
- vaesdec %xmm1,%xmm5,%xmm5
- leaq (%r12,%rbx,1),%rbx
- cmovgeq %rsp,%r12
- vaesdec %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm1,%xmm7,%xmm7
- subq %r12,%rbx
- vaesdec %xmm1,%xmm8,%xmm8
- vmovdqu 16(%r12),%xmm10
- movq %rbx,64+32(%rsp)
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups -8(%rsi),%xmm1
- leaq 16(%r12,%rbx,1),%r12
- vaesdec %xmm0,%xmm2,%xmm2
- cmpl 32+20(%rsp),%ecx
- movq 64+40(%rsp),%rbx
- vaesdec %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r13)
- vaesdec %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r11)
- vaesdec %xmm0,%xmm5,%xmm5
- leaq (%rbx,%r13,1),%rbx
- cmovgeq %rsp,%r13
- vaesdec %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm0,%xmm7,%xmm7
- subq %r13,%rbx
- vaesdec %xmm0,%xmm8,%xmm8
- vmovdqu 16(%r13),%xmm11
- movq %rbx,64+40(%rsp)
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups 8(%rsi),%xmm0
- leaq 16(%r13,%rbx,1),%r13
- vaesdec %xmm1,%xmm2,%xmm2
- cmpl 32+24(%rsp),%ecx
- movq 64+48(%rsp),%rbx
- vaesdec %xmm1,%xmm3,%xmm3
- prefetcht0 31(%r14)
- vaesdec %xmm1,%xmm4,%xmm4
- prefetcht0 15(%r12)
- vaesdec %xmm1,%xmm5,%xmm5
- leaq (%r14,%rbx,1),%rbx
- cmovgeq %rsp,%r14
- vaesdec %xmm1,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm1,%xmm7,%xmm7
- subq %r14,%rbx
- vaesdec %xmm1,%xmm8,%xmm8
- vmovdqu 16(%r14),%xmm12
- movq %rbx,64+48(%rsp)
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups 24(%rsi),%xmm1
- leaq 16(%r14,%rbx,1),%r14
- vaesdec %xmm0,%xmm2,%xmm2
- cmpl 32+28(%rsp),%ecx
- movq 64+56(%rsp),%rbx
- vaesdec %xmm0,%xmm3,%xmm3
- prefetcht0 31(%r15)
- vaesdec %xmm0,%xmm4,%xmm4
- prefetcht0 15(%r13)
- vaesdec %xmm0,%xmm5,%xmm5
- leaq (%r15,%rbx,1),%rbx
- cmovgeq %rsp,%r15
- vaesdec %xmm0,%xmm6,%xmm6
- cmovgq %rsp,%rbx
- vaesdec %xmm0,%xmm7,%xmm7
- subq %r15,%rbx
- vaesdec %xmm0,%xmm8,%xmm8
- vmovdqu 16(%r15),%xmm13
- movq %rbx,64+56(%rsp)
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups 40(%rsi),%xmm0
- leaq 16(%r15,%rbx,1),%r15
- vmovdqu 32(%rsp),%xmm14
- prefetcht0 15(%r14)
- prefetcht0 15(%r15)
- cmpl $11,%eax
- jb .Ldec8x_tail
-
- vaesdec %xmm1,%xmm2,%xmm2
- vaesdec %xmm1,%xmm3,%xmm3
- vaesdec %xmm1,%xmm4,%xmm4
- vaesdec %xmm1,%xmm5,%xmm5
- vaesdec %xmm1,%xmm6,%xmm6
- vaesdec %xmm1,%xmm7,%xmm7
- vaesdec %xmm1,%xmm8,%xmm8
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups 176-120(%rsi),%xmm1
-
- vaesdec %xmm0,%xmm2,%xmm2
- vaesdec %xmm0,%xmm3,%xmm3
- vaesdec %xmm0,%xmm4,%xmm4
- vaesdec %xmm0,%xmm5,%xmm5
- vaesdec %xmm0,%xmm6,%xmm6
- vaesdec %xmm0,%xmm7,%xmm7
- vaesdec %xmm0,%xmm8,%xmm8
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups 192-120(%rsi),%xmm0
- je .Ldec8x_tail
-
- vaesdec %xmm1,%xmm2,%xmm2
- vaesdec %xmm1,%xmm3,%xmm3
- vaesdec %xmm1,%xmm4,%xmm4
- vaesdec %xmm1,%xmm5,%xmm5
- vaesdec %xmm1,%xmm6,%xmm6
- vaesdec %xmm1,%xmm7,%xmm7
- vaesdec %xmm1,%xmm8,%xmm8
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups 208-120(%rsi),%xmm1
-
- vaesdec %xmm0,%xmm2,%xmm2
- vaesdec %xmm0,%xmm3,%xmm3
- vaesdec %xmm0,%xmm4,%xmm4
- vaesdec %xmm0,%xmm5,%xmm5
- vaesdec %xmm0,%xmm6,%xmm6
- vaesdec %xmm0,%xmm7,%xmm7
- vaesdec %xmm0,%xmm8,%xmm8
- vaesdec %xmm0,%xmm9,%xmm9
- vmovups 224-120(%rsi),%xmm0
-
-.Ldec8x_tail:
- vaesdec %xmm1,%xmm2,%xmm2
- vpxor %xmm15,%xmm15,%xmm15
- vaesdec %xmm1,%xmm3,%xmm3
- vaesdec %xmm1,%xmm4,%xmm4
- vpcmpgtd %xmm15,%xmm14,%xmm15
- vaesdec %xmm1,%xmm5,%xmm5
- vaesdec %xmm1,%xmm6,%xmm6
- vpaddd %xmm14,%xmm15,%xmm15
- vmovdqu 48(%rsp),%xmm14
- vaesdec %xmm1,%xmm7,%xmm7
- movq 64(%rsp),%rbx
- vaesdec %xmm1,%xmm8,%xmm8
- vaesdec %xmm1,%xmm9,%xmm9
- vmovups 16-120(%rsi),%xmm1
-
- vaesdeclast %xmm0,%xmm2,%xmm2
- vmovdqa %xmm15,32(%rsp)
- vpxor %xmm15,%xmm15,%xmm15
- vaesdeclast %xmm0,%xmm3,%xmm3
- vpxor 0(%rbp),%xmm2,%xmm2
- vaesdeclast %xmm0,%xmm4,%xmm4
- vpxor 16(%rbp),%xmm3,%xmm3
- vpcmpgtd %xmm15,%xmm14,%xmm15
- vaesdeclast %xmm0,%xmm5,%xmm5
- vpxor 32(%rbp),%xmm4,%xmm4
- vaesdeclast %xmm0,%xmm6,%xmm6
- vpxor 48(%rbp),%xmm5,%xmm5
- vpaddd %xmm15,%xmm14,%xmm14
- vmovdqu -120(%rsi),%xmm15
- vaesdeclast %xmm0,%xmm7,%xmm7
- vpxor 64(%rbp),%xmm6,%xmm6
- vaesdeclast %xmm0,%xmm8,%xmm8
- vpxor 80(%rbp),%xmm7,%xmm7
- vmovdqa %xmm14,48(%rsp)
- vaesdeclast %xmm0,%xmm9,%xmm9
- vpxor 96(%rbp),%xmm8,%xmm8
- vmovups 32-120(%rsi),%xmm0
-
- vmovups %xmm2,-16(%r8)
- subq %rbx,%r8
- vmovdqu 128+0(%rsp),%xmm2
- vpxor 112(%rbp),%xmm9,%xmm9
- vmovups %xmm3,-16(%r9)
- subq 72(%rsp),%r9
- vmovdqu %xmm2,0(%rbp)
- vpxor %xmm15,%xmm2,%xmm2
- vmovdqu 128+16(%rsp),%xmm3
- vmovups %xmm4,-16(%r10)
- subq 80(%rsp),%r10
- vmovdqu %xmm3,16(%rbp)
- vpxor %xmm15,%xmm3,%xmm3
- vmovdqu 128+32(%rsp),%xmm4
- vmovups %xmm5,-16(%r11)
- subq 88(%rsp),%r11
- vmovdqu %xmm4,32(%rbp)
- vpxor %xmm15,%xmm4,%xmm4
- vmovdqu 128+48(%rsp),%xmm5
- vmovups %xmm6,-16(%r12)
- subq 96(%rsp),%r12
- vmovdqu %xmm5,48(%rbp)
- vpxor %xmm15,%xmm5,%xmm5
- vmovdqu %xmm10,64(%rbp)
- vpxor %xmm10,%xmm15,%xmm6
- vmovups %xmm7,-16(%r13)
- subq 104(%rsp),%r13
- vmovdqu %xmm11,80(%rbp)
- vpxor %xmm11,%xmm15,%xmm7
- vmovups %xmm8,-16(%r14)
- subq 112(%rsp),%r14
- vmovdqu %xmm12,96(%rbp)
- vpxor %xmm12,%xmm15,%xmm8
- vmovups %xmm9,-16(%r15)
- subq 120(%rsp),%r15
- vmovdqu %xmm13,112(%rbp)
- vpxor %xmm13,%xmm15,%xmm9
-
- xorq $128,%rbp
- decl %edx
- jnz .Loop_dec8x
-
- movq 16(%rsp),%rax
-.cfi_def_cfa %rax,8
-
-
-
-
-
-.Ldec8x_done:
- vzeroupper
- movq -48(%rax),%r15
-.cfi_restore %r15
- movq -40(%rax),%r14
-.cfi_restore %r14
- movq -32(%rax),%r13
-.cfi_restore %r13
- movq -24(%rax),%r12
-.cfi_restore %r12
- movq -16(%rax),%rbp
-.cfi_restore %rbp
- movq -8(%rax),%rbx
-.cfi_restore %rbx
- leaq (%rax),%rsp
-.cfi_def_cfa_register %rsp
-.Ldec8x_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size aesni_multi_cbc_decrypt_avx,.-aesni_multi_cbc_decrypt_avx
diff --git a/secure/lib/libcrypto/amd64/aesni-sha1-x86_64.S b/secure/lib/libcrypto/amd64/aesni-sha1-x86_64.S
index 92fa5bfd685da..294db310a06a5 100644
--- a/secure/lib/libcrypto/amd64/aesni-sha1-x86_64.S
+++ b/secure/lib/libcrypto/amd64/aesni-sha1-x86_64.S
@@ -7,18 +7,15 @@
.type aesni_cbc_sha1_enc,@function
.align 32
aesni_cbc_sha1_enc:
+.cfi_startproc
movl OPENSSL_ia32cap_P+0(%rip),%r10d
movq OPENSSL_ia32cap_P+4(%rip),%r11
btq $61,%r11
jc aesni_cbc_sha1_enc_shaext
- andl $268435456,%r11d
- andl $1073741824,%r10d
- orl %r11d,%r10d
- cmpl $1342177280,%r10d
- je aesni_cbc_sha1_enc_avx
jmp aesni_cbc_sha1_enc_ssse3
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_cbc_sha1_enc,.-aesni_cbc_sha1_enc
.type aesni_cbc_sha1_enc_ssse3,@function
.align 32
@@ -1397,1327 +1394,6 @@ aesni_cbc_sha1_enc_ssse3:
.byte 0xf3,0xc3
.cfi_endproc
.size aesni_cbc_sha1_enc_ssse3,.-aesni_cbc_sha1_enc_ssse3
-.type aesni_cbc_sha1_enc_avx,@function
-.align 32
-aesni_cbc_sha1_enc_avx:
-.cfi_startproc
- movq 8(%rsp),%r10
-
-
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
- leaq -104(%rsp),%rsp
-.cfi_adjust_cfa_offset 104
-
-
- vzeroall
- movq %rdi,%r12
- movq %rsi,%r13
- movq %rdx,%r14
- leaq 112(%rcx),%r15
- vmovdqu (%r8),%xmm12
- movq %r8,88(%rsp)
- shlq $6,%r14
- subq %r12,%r13
- movl 240-112(%r15),%r8d
- addq %r10,%r14
-
- leaq K_XX_XX(%rip),%r11
- movl 0(%r9),%eax
- movl 4(%r9),%ebx
- movl 8(%r9),%ecx
- movl 12(%r9),%edx
- movl %ebx,%esi
- movl 16(%r9),%ebp
- movl %ecx,%edi
- xorl %edx,%edi
- andl %edi,%esi
-
- vmovdqa 64(%r11),%xmm6
- vmovdqa 0(%r11),%xmm10
- vmovdqu 0(%r10),%xmm0
- vmovdqu 16(%r10),%xmm1
- vmovdqu 32(%r10),%xmm2
- vmovdqu 48(%r10),%xmm3
- vpshufb %xmm6,%xmm0,%xmm0
- addq $64,%r10
- vpshufb %xmm6,%xmm1,%xmm1
- vpshufb %xmm6,%xmm2,%xmm2
- vpshufb %xmm6,%xmm3,%xmm3
- vpaddd %xmm10,%xmm0,%xmm4
- vpaddd %xmm10,%xmm1,%xmm5
- vpaddd %xmm10,%xmm2,%xmm6
- vmovdqa %xmm4,0(%rsp)
- vmovdqa %xmm5,16(%rsp)
- vmovdqa %xmm6,32(%rsp)
- vmovups -112(%r15),%xmm15
- vmovups 16-112(%r15),%xmm14
- jmp .Loop_avx
-.align 32
-.Loop_avx:
- shrdl $2,%ebx,%ebx
- vmovdqu 0(%r12),%xmm13
- vpxor %xmm15,%xmm13,%xmm13
- vpxor %xmm13,%xmm12,%xmm12
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -80(%r15),%xmm15
- xorl %edx,%esi
- vpalignr $8,%xmm0,%xmm1,%xmm4
- movl %eax,%edi
- addl 0(%rsp),%ebp
- vpaddd %xmm3,%xmm10,%xmm9
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpsrldq $4,%xmm3,%xmm8
- addl %esi,%ebp
- andl %ebx,%edi
- vpxor %xmm0,%xmm4,%xmm4
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpxor %xmm2,%xmm8,%xmm8
- shrdl $7,%eax,%eax
- xorl %ecx,%edi
- movl %ebp,%esi
- addl 4(%rsp),%edx
- vpxor %xmm8,%xmm4,%xmm4
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- vmovdqa %xmm9,48(%rsp)
- addl %edi,%edx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups -64(%r15),%xmm14
- andl %eax,%esi
- vpsrld $31,%xmm4,%xmm8
- xorl %ebx,%eax
- addl %ebp,%edx
- shrdl $7,%ebp,%ebp
- xorl %ebx,%esi
- vpslldq $12,%xmm4,%xmm9
- vpaddd %xmm4,%xmm4,%xmm4
- movl %edx,%edi
- addl 8(%rsp),%ecx
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vpor %xmm8,%xmm4,%xmm4
- vpsrld $30,%xmm9,%xmm8
- addl %esi,%ecx
- andl %ebp,%edi
- xorl %eax,%ebp
- addl %edx,%ecx
- vpslld $2,%xmm9,%xmm9
- vpxor %xmm8,%xmm4,%xmm4
- shrdl $7,%edx,%edx
- xorl %eax,%edi
- movl %ecx,%esi
- addl 12(%rsp),%ebx
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -48(%r15),%xmm15
- vpxor %xmm9,%xmm4,%xmm4
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- andl %edx,%esi
- xorl %ebp,%edx
- addl %ecx,%ebx
- shrdl $7,%ecx,%ecx
- xorl %ebp,%esi
- vpalignr $8,%xmm1,%xmm2,%xmm5
- movl %ebx,%edi
- addl 16(%rsp),%eax
- vpaddd %xmm4,%xmm10,%xmm9
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vpsrldq $4,%xmm4,%xmm8
- addl %esi,%eax
- andl %ecx,%edi
- vpxor %xmm1,%xmm5,%xmm5
- xorl %edx,%ecx
- addl %ebx,%eax
- vpxor %xmm3,%xmm8,%xmm8
- shrdl $7,%ebx,%ebx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups -32(%r15),%xmm14
- xorl %edx,%edi
- movl %eax,%esi
- addl 20(%rsp),%ebp
- vpxor %xmm8,%xmm5,%xmm5
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vmovdqa %xmm9,0(%rsp)
- addl %edi,%ebp
- andl %ebx,%esi
- vpsrld $31,%xmm5,%xmm8
- xorl %ecx,%ebx
- addl %eax,%ebp
- shrdl $7,%eax,%eax
- xorl %ecx,%esi
- vpslldq $12,%xmm5,%xmm9
- vpaddd %xmm5,%xmm5,%xmm5
- movl %ebp,%edi
- addl 24(%rsp),%edx
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- vpor %xmm8,%xmm5,%xmm5
- vpsrld $30,%xmm9,%xmm8
- addl %esi,%edx
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -16(%r15),%xmm15
- andl %eax,%edi
- xorl %ebx,%eax
- addl %ebp,%edx
- vpslld $2,%xmm9,%xmm9
- vpxor %xmm8,%xmm5,%xmm5
- shrdl $7,%ebp,%ebp
- xorl %ebx,%edi
- movl %edx,%esi
- addl 28(%rsp),%ecx
- vpxor %xmm9,%xmm5,%xmm5
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vmovdqa 16(%r11),%xmm10
- addl %edi,%ecx
- andl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- shrdl $7,%edx,%edx
- xorl %eax,%esi
- vpalignr $8,%xmm2,%xmm3,%xmm6
- movl %ecx,%edi
- addl 32(%rsp),%ebx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 0(%r15),%xmm14
- vpaddd %xmm5,%xmm10,%xmm9
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- vpsrldq $4,%xmm5,%xmm8
- addl %esi,%ebx
- andl %edx,%edi
- vpxor %xmm2,%xmm6,%xmm6
- xorl %ebp,%edx
- addl %ecx,%ebx
- vpxor %xmm4,%xmm8,%xmm8
- shrdl $7,%ecx,%ecx
- xorl %ebp,%edi
- movl %ebx,%esi
- addl 36(%rsp),%eax
- vpxor %xmm8,%xmm6,%xmm6
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vmovdqa %xmm9,16(%rsp)
- addl %edi,%eax
- andl %ecx,%esi
- vpsrld $31,%xmm6,%xmm8
- xorl %edx,%ecx
- addl %ebx,%eax
- shrdl $7,%ebx,%ebx
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 16(%r15),%xmm15
- xorl %edx,%esi
- vpslldq $12,%xmm6,%xmm9
- vpaddd %xmm6,%xmm6,%xmm6
- movl %eax,%edi
- addl 40(%rsp),%ebp
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpor %xmm8,%xmm6,%xmm6
- vpsrld $30,%xmm9,%xmm8
- addl %esi,%ebp
- andl %ebx,%edi
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpslld $2,%xmm9,%xmm9
- vpxor %xmm8,%xmm6,%xmm6
- shrdl $7,%eax,%eax
- xorl %ecx,%edi
- movl %ebp,%esi
- addl 44(%rsp),%edx
- vpxor %xmm9,%xmm6,%xmm6
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 32(%r15),%xmm14
- andl %eax,%esi
- xorl %ebx,%eax
- addl %ebp,%edx
- shrdl $7,%ebp,%ebp
- xorl %ebx,%esi
- vpalignr $8,%xmm3,%xmm4,%xmm7
- movl %edx,%edi
- addl 48(%rsp),%ecx
- vpaddd %xmm6,%xmm10,%xmm9
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vpsrldq $4,%xmm6,%xmm8
- addl %esi,%ecx
- andl %ebp,%edi
- vpxor %xmm3,%xmm7,%xmm7
- xorl %eax,%ebp
- addl %edx,%ecx
- vpxor %xmm5,%xmm8,%xmm8
- shrdl $7,%edx,%edx
- xorl %eax,%edi
- movl %ecx,%esi
- addl 52(%rsp),%ebx
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 48(%r15),%xmm15
- vpxor %xmm8,%xmm7,%xmm7
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- vmovdqa %xmm9,32(%rsp)
- addl %edi,%ebx
- andl %edx,%esi
- vpsrld $31,%xmm7,%xmm8
- xorl %ebp,%edx
- addl %ecx,%ebx
- shrdl $7,%ecx,%ecx
- xorl %ebp,%esi
- vpslldq $12,%xmm7,%xmm9
- vpaddd %xmm7,%xmm7,%xmm7
- movl %ebx,%edi
- addl 56(%rsp),%eax
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vpor %xmm8,%xmm7,%xmm7
- vpsrld $30,%xmm9,%xmm8
- addl %esi,%eax
- andl %ecx,%edi
- xorl %edx,%ecx
- addl %ebx,%eax
- vpslld $2,%xmm9,%xmm9
- vpxor %xmm8,%xmm7,%xmm7
- shrdl $7,%ebx,%ebx
- cmpl $11,%r8d
- jb .Lvaesenclast6
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 64(%r15),%xmm14
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 80(%r15),%xmm15
- je .Lvaesenclast6
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 96(%r15),%xmm14
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 112(%r15),%xmm15
-.Lvaesenclast6:
- vaesenclast %xmm15,%xmm12,%xmm12
- vmovups -112(%r15),%xmm15
- vmovups 16-112(%r15),%xmm14
- xorl %edx,%edi
- movl %eax,%esi
- addl 60(%rsp),%ebp
- vpxor %xmm9,%xmm7,%xmm7
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- addl %edi,%ebp
- andl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpalignr $8,%xmm6,%xmm7,%xmm8
- vpxor %xmm4,%xmm0,%xmm0
- shrdl $7,%eax,%eax
- xorl %ecx,%esi
- movl %ebp,%edi
- addl 0(%rsp),%edx
- vpxor %xmm1,%xmm0,%xmm0
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- vpaddd %xmm7,%xmm10,%xmm9
- addl %esi,%edx
- vmovdqu 16(%r12),%xmm13
- vpxor %xmm15,%xmm13,%xmm13
- vmovups %xmm12,0(%r12,%r13,1)
- vpxor %xmm13,%xmm12,%xmm12
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -80(%r15),%xmm15
- andl %eax,%edi
- vpxor %xmm8,%xmm0,%xmm0
- xorl %ebx,%eax
- addl %ebp,%edx
- shrdl $7,%ebp,%ebp
- xorl %ebx,%edi
- vpsrld $30,%xmm0,%xmm8
- vmovdqa %xmm9,48(%rsp)
- movl %edx,%esi
- addl 4(%rsp),%ecx
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vpslld $2,%xmm0,%xmm0
- addl %edi,%ecx
- andl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- shrdl $7,%edx,%edx
- xorl %eax,%esi
- movl %ecx,%edi
- addl 8(%rsp),%ebx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups -64(%r15),%xmm14
- vpor %xmm8,%xmm0,%xmm0
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- andl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 12(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpalignr $8,%xmm7,%xmm0,%xmm8
- vpxor %xmm5,%xmm1,%xmm1
- addl 16(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -48(%r15),%xmm15
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- vpxor %xmm2,%xmm1,%xmm1
- addl %esi,%ebp
- xorl %ecx,%edi
- vpaddd %xmm0,%xmm10,%xmm9
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpxor %xmm8,%xmm1,%xmm1
- addl 20(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- vpsrld $30,%xmm1,%xmm8
- vmovdqa %xmm9,0(%rsp)
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpslld $2,%xmm1,%xmm1
- addl 24(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups -32(%r15),%xmm14
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpor %xmm8,%xmm1,%xmm1
- addl 28(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpalignr $8,%xmm0,%xmm1,%xmm8
- vpxor %xmm6,%xmm2,%xmm2
- addl 32(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- vpxor %xmm3,%xmm2,%xmm2
- addl %esi,%eax
- xorl %edx,%edi
- vpaddd %xmm1,%xmm10,%xmm9
- vmovdqa 32(%r11),%xmm10
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpxor %xmm8,%xmm2,%xmm2
- addl 36(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -16(%r15),%xmm15
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- vpsrld $30,%xmm2,%xmm8
- vmovdqa %xmm9,16(%rsp)
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpslld $2,%xmm2,%xmm2
- addl 40(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpor %xmm8,%xmm2,%xmm2
- addl 44(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 0(%r15),%xmm14
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpalignr $8,%xmm1,%xmm2,%xmm8
- vpxor %xmm7,%xmm3,%xmm3
- addl 48(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- vpxor %xmm4,%xmm3,%xmm3
- addl %esi,%ebx
- xorl %ebp,%edi
- vpaddd %xmm2,%xmm10,%xmm9
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpxor %xmm8,%xmm3,%xmm3
- addl 52(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- vpsrld $30,%xmm3,%xmm8
- vmovdqa %xmm9,32(%rsp)
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpslld $2,%xmm3,%xmm3
- addl 56(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 16(%r15),%xmm15
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpor %xmm8,%xmm3,%xmm3
- addl 60(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpalignr $8,%xmm2,%xmm3,%xmm8
- vpxor %xmm0,%xmm4,%xmm4
- addl 0(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- vpxor %xmm5,%xmm4,%xmm4
- addl %esi,%ecx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 32(%r15),%xmm14
- xorl %eax,%edi
- vpaddd %xmm3,%xmm10,%xmm9
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpxor %xmm8,%xmm4,%xmm4
- addl 4(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- vpsrld $30,%xmm4,%xmm8
- vmovdqa %xmm9,48(%rsp)
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpslld $2,%xmm4,%xmm4
- addl 8(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpor %xmm8,%xmm4,%xmm4
- addl 12(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 48(%r15),%xmm15
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpalignr $8,%xmm3,%xmm4,%xmm8
- vpxor %xmm1,%xmm5,%xmm5
- addl 16(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- vpxor %xmm6,%xmm5,%xmm5
- addl %esi,%edx
- xorl %ebx,%edi
- vpaddd %xmm4,%xmm10,%xmm9
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpxor %xmm8,%xmm5,%xmm5
- addl 20(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- vpsrld $30,%xmm5,%xmm8
- vmovdqa %xmm9,0(%rsp)
- addl %edi,%ecx
- cmpl $11,%r8d
- jb .Lvaesenclast7
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 64(%r15),%xmm14
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 80(%r15),%xmm15
- je .Lvaesenclast7
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 96(%r15),%xmm14
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 112(%r15),%xmm15
-.Lvaesenclast7:
- vaesenclast %xmm15,%xmm12,%xmm12
- vmovups -112(%r15),%xmm15
- vmovups 16-112(%r15),%xmm14
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpslld $2,%xmm5,%xmm5
- addl 24(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpor %xmm8,%xmm5,%xmm5
- addl 28(%rsp),%eax
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- xorl %edx,%edi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- vpalignr $8,%xmm4,%xmm5,%xmm8
- vpxor %xmm2,%xmm6,%xmm6
- addl 32(%rsp),%ebp
- vmovdqu 32(%r12),%xmm13
- vpxor %xmm15,%xmm13,%xmm13
- vmovups %xmm12,16(%r13,%r12,1)
- vpxor %xmm13,%xmm12,%xmm12
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -80(%r15),%xmm15
- andl %ecx,%esi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- vpxor %xmm7,%xmm6,%xmm6
- movl %eax,%edi
- xorl %ecx,%esi
- vpaddd %xmm5,%xmm10,%xmm9
- shldl $5,%eax,%eax
- addl %esi,%ebp
- vpxor %xmm8,%xmm6,%xmm6
- xorl %ebx,%edi
- xorl %ecx,%ebx
- addl %eax,%ebp
- addl 36(%rsp),%edx
- vpsrld $30,%xmm6,%xmm8
- vmovdqa %xmm9,16(%rsp)
- andl %ebx,%edi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %ebp,%esi
- vpslld $2,%xmm6,%xmm6
- xorl %ebx,%edi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups -64(%r15),%xmm14
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %ebp,%edx
- addl 40(%rsp),%ecx
- andl %eax,%esi
- vpor %xmm8,%xmm6,%xmm6
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- movl %edx,%edi
- xorl %eax,%esi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %ebp,%edi
- xorl %eax,%ebp
- addl %edx,%ecx
- addl 44(%rsp),%ebx
- andl %ebp,%edi
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -48(%r15),%xmm15
- movl %ecx,%esi
- xorl %ebp,%edi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %edx,%esi
- xorl %ebp,%edx
- addl %ecx,%ebx
- vpalignr $8,%xmm5,%xmm6,%xmm8
- vpxor %xmm3,%xmm7,%xmm7
- addl 48(%rsp),%eax
- andl %edx,%esi
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- vpxor %xmm0,%xmm7,%xmm7
- movl %ebx,%edi
- xorl %edx,%esi
- vpaddd %xmm6,%xmm10,%xmm9
- vmovdqa 48(%r11),%xmm10
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- vpxor %xmm8,%xmm7,%xmm7
- xorl %ecx,%edi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 52(%rsp),%ebp
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups -32(%r15),%xmm14
- vpsrld $30,%xmm7,%xmm8
- vmovdqa %xmm9,32(%rsp)
- andl %ecx,%edi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%esi
- vpslld $2,%xmm7,%xmm7
- xorl %ecx,%edi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%ebp
- addl 56(%rsp),%edx
- andl %ebx,%esi
- vpor %xmm8,%xmm7,%xmm7
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %ebp,%edi
- xorl %ebx,%esi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -16(%r15),%xmm15
- xorl %eax,%edi
- xorl %ebx,%eax
- addl %ebp,%edx
- addl 60(%rsp),%ecx
- andl %eax,%edi
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- movl %edx,%esi
- xorl %eax,%edi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- vpalignr $8,%xmm6,%xmm7,%xmm8
- vpxor %xmm4,%xmm0,%xmm0
- addl 0(%rsp),%ebx
- andl %ebp,%esi
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 0(%r15),%xmm14
- vpxor %xmm1,%xmm0,%xmm0
- movl %ecx,%edi
- xorl %ebp,%esi
- vpaddd %xmm7,%xmm10,%xmm9
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- vpxor %xmm8,%xmm0,%xmm0
- xorl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 4(%rsp),%eax
- vpsrld $30,%xmm0,%xmm8
- vmovdqa %xmm9,48(%rsp)
- andl %edx,%edi
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- vpslld $2,%xmm0,%xmm0
- xorl %edx,%edi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 8(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 16(%r15),%xmm15
- andl %ecx,%esi
- vpor %xmm8,%xmm0,%xmm0
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%edi
- xorl %ecx,%esi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ebx,%edi
- xorl %ecx,%ebx
- addl %eax,%ebp
- addl 12(%rsp),%edx
- andl %ebx,%edi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %ebp,%esi
- xorl %ebx,%edi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 32(%r15),%xmm14
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %ebp,%edx
- vpalignr $8,%xmm7,%xmm0,%xmm8
- vpxor %xmm5,%xmm1,%xmm1
- addl 16(%rsp),%ecx
- andl %eax,%esi
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- vpxor %xmm2,%xmm1,%xmm1
- movl %edx,%edi
- xorl %eax,%esi
- vpaddd %xmm0,%xmm10,%xmm9
- shldl $5,%edx,%edx
- addl %esi,%ecx
- vpxor %xmm8,%xmm1,%xmm1
- xorl %ebp,%edi
- xorl %eax,%ebp
- addl %edx,%ecx
- addl 20(%rsp),%ebx
- vpsrld $30,%xmm1,%xmm8
- vmovdqa %xmm9,0(%rsp)
- andl %ebp,%edi
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 48(%r15),%xmm15
- movl %ecx,%esi
- vpslld $2,%xmm1,%xmm1
- xorl %ebp,%edi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %edx,%esi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 24(%rsp),%eax
- andl %edx,%esi
- vpor %xmm8,%xmm1,%xmm1
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%edi
- xorl %edx,%esi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %ecx,%edi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 28(%rsp),%ebp
- cmpl $11,%r8d
- jb .Lvaesenclast8
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 64(%r15),%xmm14
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 80(%r15),%xmm15
- je .Lvaesenclast8
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 96(%r15),%xmm14
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 112(%r15),%xmm15
-.Lvaesenclast8:
- vaesenclast %xmm15,%xmm12,%xmm12
- vmovups -112(%r15),%xmm15
- vmovups 16-112(%r15),%xmm14
- andl %ecx,%edi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%esi
- xorl %ecx,%edi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpalignr $8,%xmm0,%xmm1,%xmm8
- vpxor %xmm6,%xmm2,%xmm2
- addl 32(%rsp),%edx
- andl %ebx,%esi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- vpxor %xmm3,%xmm2,%xmm2
- movl %ebp,%edi
- xorl %ebx,%esi
- vpaddd %xmm1,%xmm10,%xmm9
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- vmovdqu 48(%r12),%xmm13
- vpxor %xmm15,%xmm13,%xmm13
- vmovups %xmm12,32(%r13,%r12,1)
- vpxor %xmm13,%xmm12,%xmm12
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -80(%r15),%xmm15
- vpxor %xmm8,%xmm2,%xmm2
- xorl %eax,%edi
- xorl %ebx,%eax
- addl %ebp,%edx
- addl 36(%rsp),%ecx
- vpsrld $30,%xmm2,%xmm8
- vmovdqa %xmm9,16(%rsp)
- andl %eax,%edi
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- movl %edx,%esi
- vpslld $2,%xmm2,%xmm2
- xorl %eax,%edi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- addl 40(%rsp),%ebx
- andl %ebp,%esi
- vpor %xmm8,%xmm2,%xmm2
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups -64(%r15),%xmm14
- movl %ecx,%edi
- xorl %ebp,%esi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 44(%rsp),%eax
- andl %edx,%edi
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- xorl %edx,%edi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- addl %ebx,%eax
- vpalignr $8,%xmm1,%xmm2,%xmm8
- vpxor %xmm7,%xmm3,%xmm3
- addl 48(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -48(%r15),%xmm15
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- vpxor %xmm4,%xmm3,%xmm3
- addl %esi,%ebp
- xorl %ecx,%edi
- vpaddd %xmm2,%xmm10,%xmm9
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpxor %xmm8,%xmm3,%xmm3
- addl 52(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- vpsrld $30,%xmm3,%xmm8
- vmovdqa %xmm9,32(%rsp)
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpslld $2,%xmm3,%xmm3
- addl 56(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups -32(%r15),%xmm14
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpor %xmm8,%xmm3,%xmm3
- addl 60(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 0(%rsp),%eax
- vpaddd %xmm3,%xmm10,%xmm9
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- vmovdqa %xmm9,48(%rsp)
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 4(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups -16(%r15),%xmm15
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 8(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 12(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 0(%r15),%xmm14
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- cmpq %r14,%r10
- je .Ldone_avx
- vmovdqa 64(%r11),%xmm9
- vmovdqa 0(%r11),%xmm10
- vmovdqu 0(%r10),%xmm0
- vmovdqu 16(%r10),%xmm1
- vmovdqu 32(%r10),%xmm2
- vmovdqu 48(%r10),%xmm3
- vpshufb %xmm9,%xmm0,%xmm0
- addq $64,%r10
- addl 16(%rsp),%ebx
- xorl %ebp,%esi
- vpshufb %xmm9,%xmm1,%xmm1
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- vpaddd %xmm10,%xmm0,%xmm8
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vmovdqa %xmm8,0(%rsp)
- addl 20(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 24(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 16(%r15),%xmm15
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 28(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 32(%rsp),%ecx
- xorl %eax,%esi
- vpshufb %xmm9,%xmm2,%xmm2
- movl %edx,%edi
- shldl $5,%edx,%edx
- vpaddd %xmm10,%xmm1,%xmm8
- addl %esi,%ecx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 32(%r15),%xmm14
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vmovdqa %xmm8,16(%rsp)
- addl 36(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 40(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 44(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 48(%r15),%xmm15
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 48(%rsp),%edx
- xorl %ebx,%esi
- vpshufb %xmm9,%xmm3,%xmm3
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- vpaddd %xmm10,%xmm2,%xmm8
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vmovdqa %xmm8,32(%rsp)
- addl 52(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- cmpl $11,%r8d
- jb .Lvaesenclast9
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 64(%r15),%xmm14
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 80(%r15),%xmm15
- je .Lvaesenclast9
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 96(%r15),%xmm14
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 112(%r15),%xmm15
-.Lvaesenclast9:
- vaesenclast %xmm15,%xmm12,%xmm12
- vmovups -112(%r15),%xmm15
- vmovups 16-112(%r15),%xmm14
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- addl 56(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 60(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vmovups %xmm12,48(%r13,%r12,1)
- leaq 64(%r12),%r12
-
- addl 0(%r9),%eax
- addl 4(%r9),%esi
- addl 8(%r9),%ecx
- addl 12(%r9),%edx
- movl %eax,0(%r9)
- addl 16(%r9),%ebp
- movl %esi,4(%r9)
- movl %esi,%ebx
- movl %ecx,8(%r9)
- movl %ecx,%edi
- movl %edx,12(%r9)
- xorl %edx,%edi
- movl %ebp,16(%r9)
- andl %edi,%esi
- jmp .Loop_avx
-
-.Ldone_avx:
- addl 16(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 20(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 24(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 16(%r15),%xmm15
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 28(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 32(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 32(%r15),%xmm14
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- addl 36(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 40(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 44(%rsp),%ebp
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 48(%r15),%xmm15
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 48(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 52(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- cmpl $11,%r8d
- jb .Lvaesenclast10
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 64(%r15),%xmm14
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 80(%r15),%xmm15
- je .Lvaesenclast10
- vaesenc %xmm15,%xmm12,%xmm12
- vmovups 96(%r15),%xmm14
- vaesenc %xmm14,%xmm12,%xmm12
- vmovups 112(%r15),%xmm15
-.Lvaesenclast10:
- vaesenclast %xmm15,%xmm12,%xmm12
- vmovups -112(%r15),%xmm15
- vmovups 16-112(%r15),%xmm14
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- addl 56(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 60(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vmovups %xmm12,48(%r13,%r12,1)
- movq 88(%rsp),%r8
-
- addl 0(%r9),%eax
- addl 4(%r9),%esi
- addl 8(%r9),%ecx
- movl %eax,0(%r9)
- addl 12(%r9),%edx
- movl %esi,4(%r9)
- addl 16(%r9),%ebp
- movl %ecx,8(%r9)
- movl %edx,12(%r9)
- movl %ebp,16(%r9)
- vmovups %xmm12,(%r8)
- vzeroall
- leaq 104(%rsp),%rsi
-.cfi_def_cfa %rsi,56
- movq 0(%rsi),%r15
-.cfi_restore %r15
- movq 8(%rsi),%r14
-.cfi_restore %r14
- movq 16(%rsi),%r13
-.cfi_restore %r13
- movq 24(%rsi),%r12
-.cfi_restore %r12
- movq 32(%rsi),%rbp
-.cfi_restore %rbp
- movq 40(%rsi),%rbx
-.cfi_restore %rbx
- leaq 48(%rsi),%rsp
-.cfi_def_cfa %rsp,8
-.Lepilogue_avx:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size aesni_cbc_sha1_enc_avx,.-aesni_cbc_sha1_enc_avx
.align 64
K_XX_XX:
.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
@@ -2732,6 +1408,7 @@ K_XX_XX:
.type aesni_cbc_sha1_enc_shaext,@function
.align 32
aesni_cbc_sha1_enc_shaext:
+.cfi_startproc
movq 8(%rsp),%r10
movdqu (%r9),%xmm8
movd 16(%r9),%xmm9
@@ -2808,17 +1485,17 @@ aesni_cbc_sha1_enc_shaext:
pxor %xmm3,%xmm5
.byte 15,56,201,243
cmpl $11,%r11d
- jb .Laesenclast11
+ jb .Laesenclast6
movups 64(%rcx),%xmm0
.byte 102,15,56,220,209
movups 80(%rcx),%xmm1
.byte 102,15,56,220,208
- je .Laesenclast11
+ je .Laesenclast6
movups 96(%rcx),%xmm0
.byte 102,15,56,220,209
movups 112(%rcx),%xmm1
.byte 102,15,56,220,208
-.Laesenclast11:
+.Laesenclast6:
.byte 102,15,56,221,209
movups 16-112(%rcx),%xmm0
movdqa %xmm8,%xmm10
@@ -2874,17 +1551,17 @@ aesni_cbc_sha1_enc_shaext:
pxor %xmm4,%xmm6
.byte 15,56,201,220
cmpl $11,%r11d
- jb .Laesenclast12
+ jb .Laesenclast7
movups 64(%rcx),%xmm0
.byte 102,15,56,220,209
movups 80(%rcx),%xmm1
.byte 102,15,56,220,208
- je .Laesenclast12
+ je .Laesenclast7
movups 96(%rcx),%xmm0
.byte 102,15,56,220,209
movups 112(%rcx),%xmm1
.byte 102,15,56,220,208
-.Laesenclast12:
+.Laesenclast7:
.byte 102,15,56,221,209
movups 16-112(%rcx),%xmm0
movdqa %xmm8,%xmm9
@@ -2940,17 +1617,17 @@ aesni_cbc_sha1_enc_shaext:
pxor %xmm5,%xmm3
.byte 15,56,201,229
cmpl $11,%r11d
- jb .Laesenclast13
+ jb .Laesenclast8
movups 64(%rcx),%xmm0
.byte 102,15,56,220,209
movups 80(%rcx),%xmm1
.byte 102,15,56,220,208
- je .Laesenclast13
+ je .Laesenclast8
movups 96(%rcx),%xmm0
.byte 102,15,56,220,209
movups 112(%rcx),%xmm1
.byte 102,15,56,220,208
-.Laesenclast13:
+.Laesenclast8:
.byte 102,15,56,221,209
movups 16-112(%rcx),%xmm0
movdqa %xmm8,%xmm10
@@ -3004,17 +1681,17 @@ aesni_cbc_sha1_enc_shaext:
movups 48(%rcx),%xmm1
.byte 102,15,56,220,208
cmpl $11,%r11d
- jb .Laesenclast14
+ jb .Laesenclast9
movups 64(%rcx),%xmm0
.byte 102,15,56,220,209
movups 80(%rcx),%xmm1
.byte 102,15,56,220,208
- je .Laesenclast14
+ je .Laesenclast9
movups 96(%rcx),%xmm0
.byte 102,15,56,220,209
movups 112(%rcx),%xmm1
.byte 102,15,56,220,208
-.Laesenclast14:
+.Laesenclast9:
.byte 102,15,56,221,209
movups 16-112(%rcx),%xmm0
decq %rdx
@@ -3030,4 +1707,5 @@ aesni_cbc_sha1_enc_shaext:
movdqu %xmm8,(%r9)
movd %xmm9,16(%r9)
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_cbc_sha1_enc_shaext,.-aesni_cbc_sha1_enc_shaext
diff --git a/secure/lib/libcrypto/amd64/aesni-sha256-x86_64.S b/secure/lib/libcrypto/amd64/aesni-sha256-x86_64.S
index e013190f87275..e42a02ebe6479 100644
--- a/secure/lib/libcrypto/amd64/aesni-sha256-x86_64.S
+++ b/secure/lib/libcrypto/amd64/aesni-sha256-x86_64.S
@@ -7,31 +7,14 @@
.type aesni_cbc_sha256_enc,@function
.align 16
aesni_cbc_sha256_enc:
- leaq OPENSSL_ia32cap_P(%rip),%r11
- movl $1,%eax
- cmpq $0,%rdi
- je .Lprobe
- movl 0(%r11),%eax
- movq 4(%r11),%r10
- btq $61,%r10
- jc aesni_cbc_sha256_enc_shaext
- movq %r10,%r11
- shrq $32,%r11
-
- testl $2048,%r10d
- jnz aesni_cbc_sha256_enc_xop
- andl $296,%r11d
- cmpl $296,%r11d
- je aesni_cbc_sha256_enc_avx2
- andl $268435456,%r10d
- jnz aesni_cbc_sha256_enc_avx
- ud2
+.cfi_startproc
xorl %eax,%eax
cmpq $0,%rdi
je .Lprobe
ud2
.Lprobe:
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_cbc_sha256_enc,.-aesni_cbc_sha256_enc
.align 64
@@ -76,4336 +59,3 @@ K256:
.long 0,0,0,0, 0,0,0,0
.byte 65,69,83,78,73,45,67,66,67,43,83,72,65,50,53,54,32,115,116,105,116,99,104,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 64
-.type aesni_cbc_sha256_enc_xop,@function
-.align 64
-aesni_cbc_sha256_enc_xop:
-.cfi_startproc
-.Lxop_shortcut:
- movq 8(%rsp),%r10
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- subq $128,%rsp
- andq $-64,%rsp
-
- shlq $6,%rdx
- subq %rdi,%rsi
- subq %rdi,%r10
- addq %rdi,%rdx
-
-
- movq %rsi,64+8(%rsp)
- movq %rdx,64+16(%rsp)
-
- movq %r8,64+32(%rsp)
- movq %r9,64+40(%rsp)
- movq %r10,64+48(%rsp)
- movq %rax,120(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
-.Lprologue_xop:
- vzeroall
-
- movq %rdi,%r12
- leaq 128(%rcx),%rdi
- leaq K256+544(%rip),%r13
- movl 240-128(%rdi),%r14d
- movq %r9,%r15
- movq %r10,%rsi
- vmovdqu (%r8),%xmm8
- subq $9,%r14
-
- movl 0(%r15),%eax
- movl 4(%r15),%ebx
- movl 8(%r15),%ecx
- movl 12(%r15),%edx
- movl 16(%r15),%r8d
- movl 20(%r15),%r9d
- movl 24(%r15),%r10d
- movl 28(%r15),%r11d
-
- vmovdqa 0(%r13,%r14,8),%xmm14
- vmovdqa 16(%r13,%r14,8),%xmm13
- vmovdqa 32(%r13,%r14,8),%xmm12
- vmovdqu 0-128(%rdi),%xmm10
- jmp .Lloop_xop
-.align 16
-.Lloop_xop:
- vmovdqa K256+512(%rip),%xmm7
- vmovdqu 0(%rsi,%r12,1),%xmm0
- vmovdqu 16(%rsi,%r12,1),%xmm1
- vmovdqu 32(%rsi,%r12,1),%xmm2
- vmovdqu 48(%rsi,%r12,1),%xmm3
- vpshufb %xmm7,%xmm0,%xmm0
- leaq K256(%rip),%rbp
- vpshufb %xmm7,%xmm1,%xmm1
- vpshufb %xmm7,%xmm2,%xmm2
- vpaddd 0(%rbp),%xmm0,%xmm4
- vpshufb %xmm7,%xmm3,%xmm3
- vpaddd 32(%rbp),%xmm1,%xmm5
- vpaddd 64(%rbp),%xmm2,%xmm6
- vpaddd 96(%rbp),%xmm3,%xmm7
- vmovdqa %xmm4,0(%rsp)
- movl %eax,%r14d
- vmovdqa %xmm5,16(%rsp)
- movl %ebx,%esi
- vmovdqa %xmm6,32(%rsp)
- xorl %ecx,%esi
- vmovdqa %xmm7,48(%rsp)
- movl %r8d,%r13d
- jmp .Lxop_00_47
-
-.align 16
-.Lxop_00_47:
- subq $-32*4,%rbp
- vmovdqu (%r12),%xmm9
- movq %r12,64+0(%rsp)
- vpalignr $4,%xmm0,%xmm1,%xmm4
- rorl $14,%r13d
- movl %r14d,%eax
- vpalignr $4,%xmm2,%xmm3,%xmm7
- movl %r9d,%r12d
- xorl %r8d,%r13d
-.byte 143,232,120,194,236,14
- rorl $9,%r14d
- xorl %r10d,%r12d
- vpsrld $3,%xmm4,%xmm4
- rorl $5,%r13d
- xorl %eax,%r14d
- vpaddd %xmm7,%xmm0,%xmm0
- andl %r8d,%r12d
- vpxor %xmm10,%xmm9,%xmm9
- vmovdqu 16-128(%rdi),%xmm10
- xorl %r8d,%r13d
- addl 0(%rsp),%r11d
- movl %eax,%r15d
-.byte 143,232,120,194,245,11
- rorl $11,%r14d
- xorl %r10d,%r12d
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ebx,%r15d
- rorl $6,%r13d
- addl %r12d,%r11d
- andl %r15d,%esi
-.byte 143,232,120,194,251,13
- xorl %eax,%r14d
- addl %r13d,%r11d
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ebx,%esi
- addl %r11d,%edx
- vpsrld $10,%xmm3,%xmm6
- rorl $2,%r14d
- addl %esi,%r11d
- vpaddd %xmm4,%xmm0,%xmm0
- movl %edx,%r13d
- addl %r11d,%r14d
-.byte 143,232,120,194,239,2
- rorl $14,%r13d
- movl %r14d,%r11d
- vpxor %xmm6,%xmm7,%xmm7
- movl %r8d,%r12d
- xorl %edx,%r13d
- rorl $9,%r14d
- xorl %r9d,%r12d
- vpxor %xmm5,%xmm7,%xmm7
- rorl $5,%r13d
- xorl %r11d,%r14d
- andl %edx,%r12d
- vpxor %xmm8,%xmm9,%xmm9
- xorl %edx,%r13d
- vpsrldq $8,%xmm7,%xmm7
- addl 4(%rsp),%r10d
- movl %r11d,%esi
- rorl $11,%r14d
- xorl %r9d,%r12d
- vpaddd %xmm7,%xmm0,%xmm0
- xorl %eax,%esi
- rorl $6,%r13d
- addl %r12d,%r10d
- andl %esi,%r15d
-.byte 143,232,120,194,248,13
- xorl %r11d,%r14d
- addl %r13d,%r10d
- vpsrld $10,%xmm0,%xmm6
- xorl %eax,%r15d
- addl %r10d,%ecx
-.byte 143,232,120,194,239,2
- rorl $2,%r14d
- addl %r15d,%r10d
- vpxor %xmm6,%xmm7,%xmm7
- movl %ecx,%r13d
- addl %r10d,%r14d
- rorl $14,%r13d
- movl %r14d,%r10d
- vpxor %xmm5,%xmm7,%xmm7
- movl %edx,%r12d
- xorl %ecx,%r13d
- rorl $9,%r14d
- xorl %r8d,%r12d
- vpslldq $8,%xmm7,%xmm7
- rorl $5,%r13d
- xorl %r10d,%r14d
- andl %ecx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 32-128(%rdi),%xmm10
- xorl %ecx,%r13d
- vpaddd %xmm7,%xmm0,%xmm0
- addl 8(%rsp),%r9d
- movl %r10d,%r15d
- rorl $11,%r14d
- xorl %r8d,%r12d
- vpaddd 0(%rbp),%xmm0,%xmm6
- xorl %r11d,%r15d
- rorl $6,%r13d
- addl %r12d,%r9d
- andl %r15d,%esi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- xorl %r11d,%esi
- addl %r9d,%ebx
- rorl $2,%r14d
- addl %esi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- rorl $14,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- xorl %ebx,%r13d
- rorl $9,%r14d
- xorl %edx,%r12d
- rorl $5,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 48-128(%rdi),%xmm10
- xorl %ebx,%r13d
- addl 12(%rsp),%r8d
- movl %r9d,%esi
- rorl $11,%r14d
- xorl %edx,%r12d
- xorl %r10d,%esi
- rorl $6,%r13d
- addl %r12d,%r8d
- andl %esi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- addl %r8d,%eax
- rorl $2,%r14d
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- vmovdqa %xmm6,0(%rsp)
- vpalignr $4,%xmm1,%xmm2,%xmm4
- rorl $14,%r13d
- movl %r14d,%r8d
- vpalignr $4,%xmm3,%xmm0,%xmm7
- movl %ebx,%r12d
- xorl %eax,%r13d
-.byte 143,232,120,194,236,14
- rorl $9,%r14d
- xorl %ecx,%r12d
- vpsrld $3,%xmm4,%xmm4
- rorl $5,%r13d
- xorl %r8d,%r14d
- vpaddd %xmm7,%xmm1,%xmm1
- andl %eax,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 64-128(%rdi),%xmm10
- xorl %eax,%r13d
- addl 16(%rsp),%edx
- movl %r8d,%r15d
-.byte 143,232,120,194,245,11
- rorl $11,%r14d
- xorl %ecx,%r12d
- vpxor %xmm5,%xmm4,%xmm4
- xorl %r9d,%r15d
- rorl $6,%r13d
- addl %r12d,%edx
- andl %r15d,%esi
-.byte 143,232,120,194,248,13
- xorl %r8d,%r14d
- addl %r13d,%edx
- vpxor %xmm6,%xmm4,%xmm4
- xorl %r9d,%esi
- addl %edx,%r11d
- vpsrld $10,%xmm0,%xmm6
- rorl $2,%r14d
- addl %esi,%edx
- vpaddd %xmm4,%xmm1,%xmm1
- movl %r11d,%r13d
- addl %edx,%r14d
-.byte 143,232,120,194,239,2
- rorl $14,%r13d
- movl %r14d,%edx
- vpxor %xmm6,%xmm7,%xmm7
- movl %eax,%r12d
- xorl %r11d,%r13d
- rorl $9,%r14d
- xorl %ebx,%r12d
- vpxor %xmm5,%xmm7,%xmm7
- rorl $5,%r13d
- xorl %edx,%r14d
- andl %r11d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 80-128(%rdi),%xmm10
- xorl %r11d,%r13d
- vpsrldq $8,%xmm7,%xmm7
- addl 20(%rsp),%ecx
- movl %edx,%esi
- rorl $11,%r14d
- xorl %ebx,%r12d
- vpaddd %xmm7,%xmm1,%xmm1
- xorl %r8d,%esi
- rorl $6,%r13d
- addl %r12d,%ecx
- andl %esi,%r15d
-.byte 143,232,120,194,249,13
- xorl %edx,%r14d
- addl %r13d,%ecx
- vpsrld $10,%xmm1,%xmm6
- xorl %r8d,%r15d
- addl %ecx,%r10d
-.byte 143,232,120,194,239,2
- rorl $2,%r14d
- addl %r15d,%ecx
- vpxor %xmm6,%xmm7,%xmm7
- movl %r10d,%r13d
- addl %ecx,%r14d
- rorl $14,%r13d
- movl %r14d,%ecx
- vpxor %xmm5,%xmm7,%xmm7
- movl %r11d,%r12d
- xorl %r10d,%r13d
- rorl $9,%r14d
- xorl %eax,%r12d
- vpslldq $8,%xmm7,%xmm7
- rorl $5,%r13d
- xorl %ecx,%r14d
- andl %r10d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 96-128(%rdi),%xmm10
- xorl %r10d,%r13d
- vpaddd %xmm7,%xmm1,%xmm1
- addl 24(%rsp),%ebx
- movl %ecx,%r15d
- rorl $11,%r14d
- xorl %eax,%r12d
- vpaddd 32(%rbp),%xmm1,%xmm6
- xorl %edx,%r15d
- rorl $6,%r13d
- addl %r12d,%ebx
- andl %r15d,%esi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- xorl %edx,%esi
- addl %ebx,%r9d
- rorl $2,%r14d
- addl %esi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- rorl $14,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- xorl %r9d,%r13d
- rorl $9,%r14d
- xorl %r11d,%r12d
- rorl $5,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 112-128(%rdi),%xmm10
- xorl %r9d,%r13d
- addl 28(%rsp),%eax
- movl %ebx,%esi
- rorl $11,%r14d
- xorl %r11d,%r12d
- xorl %ecx,%esi
- rorl $6,%r13d
- addl %r12d,%eax
- andl %esi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- addl %eax,%r8d
- rorl $2,%r14d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- vmovdqa %xmm6,16(%rsp)
- vpalignr $4,%xmm2,%xmm3,%xmm4
- rorl $14,%r13d
- movl %r14d,%eax
- vpalignr $4,%xmm0,%xmm1,%xmm7
- movl %r9d,%r12d
- xorl %r8d,%r13d
-.byte 143,232,120,194,236,14
- rorl $9,%r14d
- xorl %r10d,%r12d
- vpsrld $3,%xmm4,%xmm4
- rorl $5,%r13d
- xorl %eax,%r14d
- vpaddd %xmm7,%xmm2,%xmm2
- andl %r8d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 128-128(%rdi),%xmm10
- xorl %r8d,%r13d
- addl 32(%rsp),%r11d
- movl %eax,%r15d
-.byte 143,232,120,194,245,11
- rorl $11,%r14d
- xorl %r10d,%r12d
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ebx,%r15d
- rorl $6,%r13d
- addl %r12d,%r11d
- andl %r15d,%esi
-.byte 143,232,120,194,249,13
- xorl %eax,%r14d
- addl %r13d,%r11d
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ebx,%esi
- addl %r11d,%edx
- vpsrld $10,%xmm1,%xmm6
- rorl $2,%r14d
- addl %esi,%r11d
- vpaddd %xmm4,%xmm2,%xmm2
- movl %edx,%r13d
- addl %r11d,%r14d
-.byte 143,232,120,194,239,2
- rorl $14,%r13d
- movl %r14d,%r11d
- vpxor %xmm6,%xmm7,%xmm7
- movl %r8d,%r12d
- xorl %edx,%r13d
- rorl $9,%r14d
- xorl %r9d,%r12d
- vpxor %xmm5,%xmm7,%xmm7
- rorl $5,%r13d
- xorl %r11d,%r14d
- andl %edx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 144-128(%rdi),%xmm10
- xorl %edx,%r13d
- vpsrldq $8,%xmm7,%xmm7
- addl 36(%rsp),%r10d
- movl %r11d,%esi
- rorl $11,%r14d
- xorl %r9d,%r12d
- vpaddd %xmm7,%xmm2,%xmm2
- xorl %eax,%esi
- rorl $6,%r13d
- addl %r12d,%r10d
- andl %esi,%r15d
-.byte 143,232,120,194,250,13
- xorl %r11d,%r14d
- addl %r13d,%r10d
- vpsrld $10,%xmm2,%xmm6
- xorl %eax,%r15d
- addl %r10d,%ecx
-.byte 143,232,120,194,239,2
- rorl $2,%r14d
- addl %r15d,%r10d
- vpxor %xmm6,%xmm7,%xmm7
- movl %ecx,%r13d
- addl %r10d,%r14d
- rorl $14,%r13d
- movl %r14d,%r10d
- vpxor %xmm5,%xmm7,%xmm7
- movl %edx,%r12d
- xorl %ecx,%r13d
- rorl $9,%r14d
- xorl %r8d,%r12d
- vpslldq $8,%xmm7,%xmm7
- rorl $5,%r13d
- xorl %r10d,%r14d
- andl %ecx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 160-128(%rdi),%xmm10
- xorl %ecx,%r13d
- vpaddd %xmm7,%xmm2,%xmm2
- addl 40(%rsp),%r9d
- movl %r10d,%r15d
- rorl $11,%r14d
- xorl %r8d,%r12d
- vpaddd 64(%rbp),%xmm2,%xmm6
- xorl %r11d,%r15d
- rorl $6,%r13d
- addl %r12d,%r9d
- andl %r15d,%esi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- xorl %r11d,%esi
- addl %r9d,%ebx
- rorl $2,%r14d
- addl %esi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- rorl $14,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- xorl %ebx,%r13d
- rorl $9,%r14d
- xorl %edx,%r12d
- rorl $5,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 176-128(%rdi),%xmm10
- xorl %ebx,%r13d
- addl 44(%rsp),%r8d
- movl %r9d,%esi
- rorl $11,%r14d
- xorl %edx,%r12d
- xorl %r10d,%esi
- rorl $6,%r13d
- addl %r12d,%r8d
- andl %esi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- addl %r8d,%eax
- rorl $2,%r14d
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- vmovdqa %xmm6,32(%rsp)
- vpalignr $4,%xmm3,%xmm0,%xmm4
- rorl $14,%r13d
- movl %r14d,%r8d
- vpalignr $4,%xmm1,%xmm2,%xmm7
- movl %ebx,%r12d
- xorl %eax,%r13d
-.byte 143,232,120,194,236,14
- rorl $9,%r14d
- xorl %ecx,%r12d
- vpsrld $3,%xmm4,%xmm4
- rorl $5,%r13d
- xorl %r8d,%r14d
- vpaddd %xmm7,%xmm3,%xmm3
- andl %eax,%r12d
- vpand %xmm12,%xmm11,%xmm8
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 192-128(%rdi),%xmm10
- xorl %eax,%r13d
- addl 48(%rsp),%edx
- movl %r8d,%r15d
-.byte 143,232,120,194,245,11
- rorl $11,%r14d
- xorl %ecx,%r12d
- vpxor %xmm5,%xmm4,%xmm4
- xorl %r9d,%r15d
- rorl $6,%r13d
- addl %r12d,%edx
- andl %r15d,%esi
-.byte 143,232,120,194,250,13
- xorl %r8d,%r14d
- addl %r13d,%edx
- vpxor %xmm6,%xmm4,%xmm4
- xorl %r9d,%esi
- addl %edx,%r11d
- vpsrld $10,%xmm2,%xmm6
- rorl $2,%r14d
- addl %esi,%edx
- vpaddd %xmm4,%xmm3,%xmm3
- movl %r11d,%r13d
- addl %edx,%r14d
-.byte 143,232,120,194,239,2
- rorl $14,%r13d
- movl %r14d,%edx
- vpxor %xmm6,%xmm7,%xmm7
- movl %eax,%r12d
- xorl %r11d,%r13d
- rorl $9,%r14d
- xorl %ebx,%r12d
- vpxor %xmm5,%xmm7,%xmm7
- rorl $5,%r13d
- xorl %edx,%r14d
- andl %r11d,%r12d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 208-128(%rdi),%xmm10
- xorl %r11d,%r13d
- vpsrldq $8,%xmm7,%xmm7
- addl 52(%rsp),%ecx
- movl %edx,%esi
- rorl $11,%r14d
- xorl %ebx,%r12d
- vpaddd %xmm7,%xmm3,%xmm3
- xorl %r8d,%esi
- rorl $6,%r13d
- addl %r12d,%ecx
- andl %esi,%r15d
-.byte 143,232,120,194,251,13
- xorl %edx,%r14d
- addl %r13d,%ecx
- vpsrld $10,%xmm3,%xmm6
- xorl %r8d,%r15d
- addl %ecx,%r10d
-.byte 143,232,120,194,239,2
- rorl $2,%r14d
- addl %r15d,%ecx
- vpxor %xmm6,%xmm7,%xmm7
- movl %r10d,%r13d
- addl %ecx,%r14d
- rorl $14,%r13d
- movl %r14d,%ecx
- vpxor %xmm5,%xmm7,%xmm7
- movl %r11d,%r12d
- xorl %r10d,%r13d
- rorl $9,%r14d
- xorl %eax,%r12d
- vpslldq $8,%xmm7,%xmm7
- rorl $5,%r13d
- xorl %ecx,%r14d
- andl %r10d,%r12d
- vpand %xmm13,%xmm11,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 224-128(%rdi),%xmm10
- xorl %r10d,%r13d
- vpaddd %xmm7,%xmm3,%xmm3
- addl 56(%rsp),%ebx
- movl %ecx,%r15d
- rorl $11,%r14d
- xorl %eax,%r12d
- vpaddd 96(%rbp),%xmm3,%xmm6
- xorl %edx,%r15d
- rorl $6,%r13d
- addl %r12d,%ebx
- andl %r15d,%esi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- xorl %edx,%esi
- addl %ebx,%r9d
- rorl $2,%r14d
- addl %esi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- rorl $14,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- xorl %r9d,%r13d
- rorl $9,%r14d
- xorl %r11d,%r12d
- rorl $5,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- vpor %xmm11,%xmm8,%xmm8
- vaesenclast %xmm10,%xmm9,%xmm11
- vmovdqu 0-128(%rdi),%xmm10
- xorl %r9d,%r13d
- addl 60(%rsp),%eax
- movl %ebx,%esi
- rorl $11,%r14d
- xorl %r11d,%r12d
- xorl %ecx,%esi
- rorl $6,%r13d
- addl %r12d,%eax
- andl %esi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- addl %eax,%r8d
- rorl $2,%r14d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- vmovdqa %xmm6,48(%rsp)
- movq 64+0(%rsp),%r12
- vpand %xmm14,%xmm11,%xmm11
- movq 64+8(%rsp),%r15
- vpor %xmm11,%xmm8,%xmm8
- vmovdqu %xmm8,(%r15,%r12,1)
- leaq 16(%r12),%r12
- cmpb $0,131(%rbp)
- jne .Lxop_00_47
- vmovdqu (%r12),%xmm9
- movq %r12,64+0(%rsp)
- rorl $14,%r13d
- movl %r14d,%eax
- movl %r9d,%r12d
- xorl %r8d,%r13d
- rorl $9,%r14d
- xorl %r10d,%r12d
- rorl $5,%r13d
- xorl %eax,%r14d
- andl %r8d,%r12d
- vpxor %xmm10,%xmm9,%xmm9
- vmovdqu 16-128(%rdi),%xmm10
- xorl %r8d,%r13d
- addl 0(%rsp),%r11d
- movl %eax,%r15d
- rorl $11,%r14d
- xorl %r10d,%r12d
- xorl %ebx,%r15d
- rorl $6,%r13d
- addl %r12d,%r11d
- andl %r15d,%esi
- xorl %eax,%r14d
- addl %r13d,%r11d
- xorl %ebx,%esi
- addl %r11d,%edx
- rorl $2,%r14d
- addl %esi,%r11d
- movl %edx,%r13d
- addl %r11d,%r14d
- rorl $14,%r13d
- movl %r14d,%r11d
- movl %r8d,%r12d
- xorl %edx,%r13d
- rorl $9,%r14d
- xorl %r9d,%r12d
- rorl $5,%r13d
- xorl %r11d,%r14d
- andl %edx,%r12d
- vpxor %xmm8,%xmm9,%xmm9
- xorl %edx,%r13d
- addl 4(%rsp),%r10d
- movl %r11d,%esi
- rorl $11,%r14d
- xorl %r9d,%r12d
- xorl %eax,%esi
- rorl $6,%r13d
- addl %r12d,%r10d
- andl %esi,%r15d
- xorl %r11d,%r14d
- addl %r13d,%r10d
- xorl %eax,%r15d
- addl %r10d,%ecx
- rorl $2,%r14d
- addl %r15d,%r10d
- movl %ecx,%r13d
- addl %r10d,%r14d
- rorl $14,%r13d
- movl %r14d,%r10d
- movl %edx,%r12d
- xorl %ecx,%r13d
- rorl $9,%r14d
- xorl %r8d,%r12d
- rorl $5,%r13d
- xorl %r10d,%r14d
- andl %ecx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 32-128(%rdi),%xmm10
- xorl %ecx,%r13d
- addl 8(%rsp),%r9d
- movl %r10d,%r15d
- rorl $11,%r14d
- xorl %r8d,%r12d
- xorl %r11d,%r15d
- rorl $6,%r13d
- addl %r12d,%r9d
- andl %r15d,%esi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- xorl %r11d,%esi
- addl %r9d,%ebx
- rorl $2,%r14d
- addl %esi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- rorl $14,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- xorl %ebx,%r13d
- rorl $9,%r14d
- xorl %edx,%r12d
- rorl $5,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 48-128(%rdi),%xmm10
- xorl %ebx,%r13d
- addl 12(%rsp),%r8d
- movl %r9d,%esi
- rorl $11,%r14d
- xorl %edx,%r12d
- xorl %r10d,%esi
- rorl $6,%r13d
- addl %r12d,%r8d
- andl %esi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- addl %r8d,%eax
- rorl $2,%r14d
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- rorl $14,%r13d
- movl %r14d,%r8d
- movl %ebx,%r12d
- xorl %eax,%r13d
- rorl $9,%r14d
- xorl %ecx,%r12d
- rorl $5,%r13d
- xorl %r8d,%r14d
- andl %eax,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 64-128(%rdi),%xmm10
- xorl %eax,%r13d
- addl 16(%rsp),%edx
- movl %r8d,%r15d
- rorl $11,%r14d
- xorl %ecx,%r12d
- xorl %r9d,%r15d
- rorl $6,%r13d
- addl %r12d,%edx
- andl %r15d,%esi
- xorl %r8d,%r14d
- addl %r13d,%edx
- xorl %r9d,%esi
- addl %edx,%r11d
- rorl $2,%r14d
- addl %esi,%edx
- movl %r11d,%r13d
- addl %edx,%r14d
- rorl $14,%r13d
- movl %r14d,%edx
- movl %eax,%r12d
- xorl %r11d,%r13d
- rorl $9,%r14d
- xorl %ebx,%r12d
- rorl $5,%r13d
- xorl %edx,%r14d
- andl %r11d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 80-128(%rdi),%xmm10
- xorl %r11d,%r13d
- addl 20(%rsp),%ecx
- movl %edx,%esi
- rorl $11,%r14d
- xorl %ebx,%r12d
- xorl %r8d,%esi
- rorl $6,%r13d
- addl %r12d,%ecx
- andl %esi,%r15d
- xorl %edx,%r14d
- addl %r13d,%ecx
- xorl %r8d,%r15d
- addl %ecx,%r10d
- rorl $2,%r14d
- addl %r15d,%ecx
- movl %r10d,%r13d
- addl %ecx,%r14d
- rorl $14,%r13d
- movl %r14d,%ecx
- movl %r11d,%r12d
- xorl %r10d,%r13d
- rorl $9,%r14d
- xorl %eax,%r12d
- rorl $5,%r13d
- xorl %ecx,%r14d
- andl %r10d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 96-128(%rdi),%xmm10
- xorl %r10d,%r13d
- addl 24(%rsp),%ebx
- movl %ecx,%r15d
- rorl $11,%r14d
- xorl %eax,%r12d
- xorl %edx,%r15d
- rorl $6,%r13d
- addl %r12d,%ebx
- andl %r15d,%esi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- xorl %edx,%esi
- addl %ebx,%r9d
- rorl $2,%r14d
- addl %esi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- rorl $14,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- xorl %r9d,%r13d
- rorl $9,%r14d
- xorl %r11d,%r12d
- rorl $5,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 112-128(%rdi),%xmm10
- xorl %r9d,%r13d
- addl 28(%rsp),%eax
- movl %ebx,%esi
- rorl $11,%r14d
- xorl %r11d,%r12d
- xorl %ecx,%esi
- rorl $6,%r13d
- addl %r12d,%eax
- andl %esi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- addl %eax,%r8d
- rorl $2,%r14d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- rorl $14,%r13d
- movl %r14d,%eax
- movl %r9d,%r12d
- xorl %r8d,%r13d
- rorl $9,%r14d
- xorl %r10d,%r12d
- rorl $5,%r13d
- xorl %eax,%r14d
- andl %r8d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 128-128(%rdi),%xmm10
- xorl %r8d,%r13d
- addl 32(%rsp),%r11d
- movl %eax,%r15d
- rorl $11,%r14d
- xorl %r10d,%r12d
- xorl %ebx,%r15d
- rorl $6,%r13d
- addl %r12d,%r11d
- andl %r15d,%esi
- xorl %eax,%r14d
- addl %r13d,%r11d
- xorl %ebx,%esi
- addl %r11d,%edx
- rorl $2,%r14d
- addl %esi,%r11d
- movl %edx,%r13d
- addl %r11d,%r14d
- rorl $14,%r13d
- movl %r14d,%r11d
- movl %r8d,%r12d
- xorl %edx,%r13d
- rorl $9,%r14d
- xorl %r9d,%r12d
- rorl $5,%r13d
- xorl %r11d,%r14d
- andl %edx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 144-128(%rdi),%xmm10
- xorl %edx,%r13d
- addl 36(%rsp),%r10d
- movl %r11d,%esi
- rorl $11,%r14d
- xorl %r9d,%r12d
- xorl %eax,%esi
- rorl $6,%r13d
- addl %r12d,%r10d
- andl %esi,%r15d
- xorl %r11d,%r14d
- addl %r13d,%r10d
- xorl %eax,%r15d
- addl %r10d,%ecx
- rorl $2,%r14d
- addl %r15d,%r10d
- movl %ecx,%r13d
- addl %r10d,%r14d
- rorl $14,%r13d
- movl %r14d,%r10d
- movl %edx,%r12d
- xorl %ecx,%r13d
- rorl $9,%r14d
- xorl %r8d,%r12d
- rorl $5,%r13d
- xorl %r10d,%r14d
- andl %ecx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 160-128(%rdi),%xmm10
- xorl %ecx,%r13d
- addl 40(%rsp),%r9d
- movl %r10d,%r15d
- rorl $11,%r14d
- xorl %r8d,%r12d
- xorl %r11d,%r15d
- rorl $6,%r13d
- addl %r12d,%r9d
- andl %r15d,%esi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- xorl %r11d,%esi
- addl %r9d,%ebx
- rorl $2,%r14d
- addl %esi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- rorl $14,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- xorl %ebx,%r13d
- rorl $9,%r14d
- xorl %edx,%r12d
- rorl $5,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 176-128(%rdi),%xmm10
- xorl %ebx,%r13d
- addl 44(%rsp),%r8d
- movl %r9d,%esi
- rorl $11,%r14d
- xorl %edx,%r12d
- xorl %r10d,%esi
- rorl $6,%r13d
- addl %r12d,%r8d
- andl %esi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- addl %r8d,%eax
- rorl $2,%r14d
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- rorl $14,%r13d
- movl %r14d,%r8d
- movl %ebx,%r12d
- xorl %eax,%r13d
- rorl $9,%r14d
- xorl %ecx,%r12d
- rorl $5,%r13d
- xorl %r8d,%r14d
- andl %eax,%r12d
- vpand %xmm12,%xmm11,%xmm8
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 192-128(%rdi),%xmm10
- xorl %eax,%r13d
- addl 48(%rsp),%edx
- movl %r8d,%r15d
- rorl $11,%r14d
- xorl %ecx,%r12d
- xorl %r9d,%r15d
- rorl $6,%r13d
- addl %r12d,%edx
- andl %r15d,%esi
- xorl %r8d,%r14d
- addl %r13d,%edx
- xorl %r9d,%esi
- addl %edx,%r11d
- rorl $2,%r14d
- addl %esi,%edx
- movl %r11d,%r13d
- addl %edx,%r14d
- rorl $14,%r13d
- movl %r14d,%edx
- movl %eax,%r12d
- xorl %r11d,%r13d
- rorl $9,%r14d
- xorl %ebx,%r12d
- rorl $5,%r13d
- xorl %edx,%r14d
- andl %r11d,%r12d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 208-128(%rdi),%xmm10
- xorl %r11d,%r13d
- addl 52(%rsp),%ecx
- movl %edx,%esi
- rorl $11,%r14d
- xorl %ebx,%r12d
- xorl %r8d,%esi
- rorl $6,%r13d
- addl %r12d,%ecx
- andl %esi,%r15d
- xorl %edx,%r14d
- addl %r13d,%ecx
- xorl %r8d,%r15d
- addl %ecx,%r10d
- rorl $2,%r14d
- addl %r15d,%ecx
- movl %r10d,%r13d
- addl %ecx,%r14d
- rorl $14,%r13d
- movl %r14d,%ecx
- movl %r11d,%r12d
- xorl %r10d,%r13d
- rorl $9,%r14d
- xorl %eax,%r12d
- rorl $5,%r13d
- xorl %ecx,%r14d
- andl %r10d,%r12d
- vpand %xmm13,%xmm11,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 224-128(%rdi),%xmm10
- xorl %r10d,%r13d
- addl 56(%rsp),%ebx
- movl %ecx,%r15d
- rorl $11,%r14d
- xorl %eax,%r12d
- xorl %edx,%r15d
- rorl $6,%r13d
- addl %r12d,%ebx
- andl %r15d,%esi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- xorl %edx,%esi
- addl %ebx,%r9d
- rorl $2,%r14d
- addl %esi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- rorl $14,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- xorl %r9d,%r13d
- rorl $9,%r14d
- xorl %r11d,%r12d
- rorl $5,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- vpor %xmm11,%xmm8,%xmm8
- vaesenclast %xmm10,%xmm9,%xmm11
- vmovdqu 0-128(%rdi),%xmm10
- xorl %r9d,%r13d
- addl 60(%rsp),%eax
- movl %ebx,%esi
- rorl $11,%r14d
- xorl %r11d,%r12d
- xorl %ecx,%esi
- rorl $6,%r13d
- addl %r12d,%eax
- andl %esi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- addl %eax,%r8d
- rorl $2,%r14d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- movq 64+0(%rsp),%r12
- movq 64+8(%rsp),%r13
- movq 64+40(%rsp),%r15
- movq 64+48(%rsp),%rsi
-
- vpand %xmm14,%xmm11,%xmm11
- movl %r14d,%eax
- vpor %xmm11,%xmm8,%xmm8
- vmovdqu %xmm8,(%r12,%r13,1)
- leaq 16(%r12),%r12
-
- addl 0(%r15),%eax
- addl 4(%r15),%ebx
- addl 8(%r15),%ecx
- addl 12(%r15),%edx
- addl 16(%r15),%r8d
- addl 20(%r15),%r9d
- addl 24(%r15),%r10d
- addl 28(%r15),%r11d
-
- cmpq 64+16(%rsp),%r12
-
- movl %eax,0(%r15)
- movl %ebx,4(%r15)
- movl %ecx,8(%r15)
- movl %edx,12(%r15)
- movl %r8d,16(%r15)
- movl %r9d,20(%r15)
- movl %r10d,24(%r15)
- movl %r11d,28(%r15)
-
- jb .Lloop_xop
-
- movq 64+32(%rsp),%r8
- movq 120(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- vmovdqu %xmm8,(%r8)
- vzeroall
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_xop:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size aesni_cbc_sha256_enc_xop,.-aesni_cbc_sha256_enc_xop
-.type aesni_cbc_sha256_enc_avx,@function
-.align 64
-aesni_cbc_sha256_enc_avx:
-.cfi_startproc
-.Lavx_shortcut:
- movq 8(%rsp),%r10
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- subq $128,%rsp
- andq $-64,%rsp
-
- shlq $6,%rdx
- subq %rdi,%rsi
- subq %rdi,%r10
- addq %rdi,%rdx
-
-
- movq %rsi,64+8(%rsp)
- movq %rdx,64+16(%rsp)
-
- movq %r8,64+32(%rsp)
- movq %r9,64+40(%rsp)
- movq %r10,64+48(%rsp)
- movq %rax,120(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
-.Lprologue_avx:
- vzeroall
-
- movq %rdi,%r12
- leaq 128(%rcx),%rdi
- leaq K256+544(%rip),%r13
- movl 240-128(%rdi),%r14d
- movq %r9,%r15
- movq %r10,%rsi
- vmovdqu (%r8),%xmm8
- subq $9,%r14
-
- movl 0(%r15),%eax
- movl 4(%r15),%ebx
- movl 8(%r15),%ecx
- movl 12(%r15),%edx
- movl 16(%r15),%r8d
- movl 20(%r15),%r9d
- movl 24(%r15),%r10d
- movl 28(%r15),%r11d
-
- vmovdqa 0(%r13,%r14,8),%xmm14
- vmovdqa 16(%r13,%r14,8),%xmm13
- vmovdqa 32(%r13,%r14,8),%xmm12
- vmovdqu 0-128(%rdi),%xmm10
- jmp .Lloop_avx
-.align 16
-.Lloop_avx:
- vmovdqa K256+512(%rip),%xmm7
- vmovdqu 0(%rsi,%r12,1),%xmm0
- vmovdqu 16(%rsi,%r12,1),%xmm1
- vmovdqu 32(%rsi,%r12,1),%xmm2
- vmovdqu 48(%rsi,%r12,1),%xmm3
- vpshufb %xmm7,%xmm0,%xmm0
- leaq K256(%rip),%rbp
- vpshufb %xmm7,%xmm1,%xmm1
- vpshufb %xmm7,%xmm2,%xmm2
- vpaddd 0(%rbp),%xmm0,%xmm4
- vpshufb %xmm7,%xmm3,%xmm3
- vpaddd 32(%rbp),%xmm1,%xmm5
- vpaddd 64(%rbp),%xmm2,%xmm6
- vpaddd 96(%rbp),%xmm3,%xmm7
- vmovdqa %xmm4,0(%rsp)
- movl %eax,%r14d
- vmovdqa %xmm5,16(%rsp)
- movl %ebx,%esi
- vmovdqa %xmm6,32(%rsp)
- xorl %ecx,%esi
- vmovdqa %xmm7,48(%rsp)
- movl %r8d,%r13d
- jmp .Lavx_00_47
-
-.align 16
-.Lavx_00_47:
- subq $-32*4,%rbp
- vmovdqu (%r12),%xmm9
- movq %r12,64+0(%rsp)
- vpalignr $4,%xmm0,%xmm1,%xmm4
- shrdl $14,%r13d,%r13d
- movl %r14d,%eax
- movl %r9d,%r12d
- vpalignr $4,%xmm2,%xmm3,%xmm7
- xorl %r8d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r10d,%r12d
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %eax,%r14d
- andl %r8d,%r12d
- vpaddd %xmm7,%xmm0,%xmm0
- vpxor %xmm10,%xmm9,%xmm9
- vmovdqu 16-128(%rdi),%xmm10
- xorl %r8d,%r13d
- addl 0(%rsp),%r11d
- movl %eax,%r15d
- vpsrld $3,%xmm4,%xmm7
- shrdl $11,%r14d,%r14d
- xorl %r10d,%r12d
- xorl %ebx,%r15d
- vpslld $14,%xmm4,%xmm5
- shrdl $6,%r13d,%r13d
- addl %r12d,%r11d
- andl %r15d,%esi
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%r14d
- addl %r13d,%r11d
- xorl %ebx,%esi
- vpshufd $250,%xmm3,%xmm7
- addl %r11d,%edx
- shrdl $2,%r14d,%r14d
- addl %esi,%r11d
- vpsrld $11,%xmm6,%xmm6
- movl %edx,%r13d
- addl %r11d,%r14d
- shrdl $14,%r13d,%r13d
- vpxor %xmm5,%xmm4,%xmm4
- movl %r14d,%r11d
- movl %r8d,%r12d
- xorl %edx,%r13d
- vpslld $11,%xmm5,%xmm5
- shrdl $9,%r14d,%r14d
- xorl %r9d,%r12d
- shrdl $5,%r13d,%r13d
- vpxor %xmm6,%xmm4,%xmm4
- xorl %r11d,%r14d
- andl %edx,%r12d
- vpxor %xmm8,%xmm9,%xmm9
- xorl %edx,%r13d
- vpsrld $10,%xmm7,%xmm6
- addl 4(%rsp),%r10d
- movl %r11d,%esi
- shrdl $11,%r14d,%r14d
- vpxor %xmm5,%xmm4,%xmm4
- xorl %r9d,%r12d
- xorl %eax,%esi
- shrdl $6,%r13d,%r13d
- vpsrlq $17,%xmm7,%xmm7
- addl %r12d,%r10d
- andl %esi,%r15d
- xorl %r11d,%r14d
- vpaddd %xmm4,%xmm0,%xmm0
- addl %r13d,%r10d
- xorl %eax,%r15d
- addl %r10d,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $2,%r14d,%r14d
- addl %r15d,%r10d
- movl %ecx,%r13d
- vpsrlq $2,%xmm7,%xmm7
- addl %r10d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r10d
- vpxor %xmm7,%xmm6,%xmm6
- movl %edx,%r12d
- xorl %ecx,%r13d
- shrdl $9,%r14d,%r14d
- vpshufd $132,%xmm6,%xmm6
- xorl %r8d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r10d,%r14d
- vpsrldq $8,%xmm6,%xmm6
- andl %ecx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 32-128(%rdi),%xmm10
- xorl %ecx,%r13d
- addl 8(%rsp),%r9d
- vpaddd %xmm6,%xmm0,%xmm0
- movl %r10d,%r15d
- shrdl $11,%r14d,%r14d
- xorl %r8d,%r12d
- vpshufd $80,%xmm0,%xmm7
- xorl %r11d,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%r9d
- vpsrld $10,%xmm7,%xmm6
- andl %r15d,%esi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- vpsrlq $17,%xmm7,%xmm7
- xorl %r11d,%esi
- addl %r9d,%ebx
- shrdl $2,%r14d,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- addl %esi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- vpsrlq $2,%xmm7,%xmm7
- shrdl $14,%r13d,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- vpxor %xmm7,%xmm6,%xmm6
- xorl %ebx,%r13d
- shrdl $9,%r14d,%r14d
- xorl %edx,%r12d
- vpshufd $232,%xmm6,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- vpslldq $8,%xmm6,%xmm6
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 48-128(%rdi),%xmm10
- xorl %ebx,%r13d
- addl 12(%rsp),%r8d
- movl %r9d,%esi
- vpaddd %xmm6,%xmm0,%xmm0
- shrdl $11,%r14d,%r14d
- xorl %edx,%r12d
- xorl %r10d,%esi
- vpaddd 0(%rbp),%xmm0,%xmm6
- shrdl $6,%r13d,%r13d
- addl %r12d,%r8d
- andl %esi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- addl %r8d,%eax
- shrdl $2,%r14d,%r14d
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- vmovdqa %xmm6,0(%rsp)
- vpalignr $4,%xmm1,%xmm2,%xmm4
- shrdl $14,%r13d,%r13d
- movl %r14d,%r8d
- movl %ebx,%r12d
- vpalignr $4,%xmm3,%xmm0,%xmm7
- xorl %eax,%r13d
- shrdl $9,%r14d,%r14d
- xorl %ecx,%r12d
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %r8d,%r14d
- andl %eax,%r12d
- vpaddd %xmm7,%xmm1,%xmm1
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 64-128(%rdi),%xmm10
- xorl %eax,%r13d
- addl 16(%rsp),%edx
- movl %r8d,%r15d
- vpsrld $3,%xmm4,%xmm7
- shrdl $11,%r14d,%r14d
- xorl %ecx,%r12d
- xorl %r9d,%r15d
- vpslld $14,%xmm4,%xmm5
- shrdl $6,%r13d,%r13d
- addl %r12d,%edx
- andl %r15d,%esi
- vpxor %xmm6,%xmm7,%xmm4
- xorl %r8d,%r14d
- addl %r13d,%edx
- xorl %r9d,%esi
- vpshufd $250,%xmm0,%xmm7
- addl %edx,%r11d
- shrdl $2,%r14d,%r14d
- addl %esi,%edx
- vpsrld $11,%xmm6,%xmm6
- movl %r11d,%r13d
- addl %edx,%r14d
- shrdl $14,%r13d,%r13d
- vpxor %xmm5,%xmm4,%xmm4
- movl %r14d,%edx
- movl %eax,%r12d
- xorl %r11d,%r13d
- vpslld $11,%xmm5,%xmm5
- shrdl $9,%r14d,%r14d
- xorl %ebx,%r12d
- shrdl $5,%r13d,%r13d
- vpxor %xmm6,%xmm4,%xmm4
- xorl %edx,%r14d
- andl %r11d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 80-128(%rdi),%xmm10
- xorl %r11d,%r13d
- vpsrld $10,%xmm7,%xmm6
- addl 20(%rsp),%ecx
- movl %edx,%esi
- shrdl $11,%r14d,%r14d
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ebx,%r12d
- xorl %r8d,%esi
- shrdl $6,%r13d,%r13d
- vpsrlq $17,%xmm7,%xmm7
- addl %r12d,%ecx
- andl %esi,%r15d
- xorl %edx,%r14d
- vpaddd %xmm4,%xmm1,%xmm1
- addl %r13d,%ecx
- xorl %r8d,%r15d
- addl %ecx,%r10d
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $2,%r14d,%r14d
- addl %r15d,%ecx
- movl %r10d,%r13d
- vpsrlq $2,%xmm7,%xmm7
- addl %ecx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- movl %r11d,%r12d
- xorl %r10d,%r13d
- shrdl $9,%r14d,%r14d
- vpshufd $132,%xmm6,%xmm6
- xorl %eax,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ecx,%r14d
- vpsrldq $8,%xmm6,%xmm6
- andl %r10d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 96-128(%rdi),%xmm10
- xorl %r10d,%r13d
- addl 24(%rsp),%ebx
- vpaddd %xmm6,%xmm1,%xmm1
- movl %ecx,%r15d
- shrdl $11,%r14d,%r14d
- xorl %eax,%r12d
- vpshufd $80,%xmm1,%xmm7
- xorl %edx,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%ebx
- vpsrld $10,%xmm7,%xmm6
- andl %r15d,%esi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- vpsrlq $17,%xmm7,%xmm7
- xorl %edx,%esi
- addl %ebx,%r9d
- shrdl $2,%r14d,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- addl %esi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- vpsrlq $2,%xmm7,%xmm7
- shrdl $14,%r13d,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- vpxor %xmm7,%xmm6,%xmm6
- xorl %r9d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r11d,%r12d
- vpshufd $232,%xmm6,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- vpslldq $8,%xmm6,%xmm6
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 112-128(%rdi),%xmm10
- xorl %r9d,%r13d
- addl 28(%rsp),%eax
- movl %ebx,%esi
- vpaddd %xmm6,%xmm1,%xmm1
- shrdl $11,%r14d,%r14d
- xorl %r11d,%r12d
- xorl %ecx,%esi
- vpaddd 32(%rbp),%xmm1,%xmm6
- shrdl $6,%r13d,%r13d
- addl %r12d,%eax
- andl %esi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- addl %eax,%r8d
- shrdl $2,%r14d,%r14d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- vmovdqa %xmm6,16(%rsp)
- vpalignr $4,%xmm2,%xmm3,%xmm4
- shrdl $14,%r13d,%r13d
- movl %r14d,%eax
- movl %r9d,%r12d
- vpalignr $4,%xmm0,%xmm1,%xmm7
- xorl %r8d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r10d,%r12d
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %eax,%r14d
- andl %r8d,%r12d
- vpaddd %xmm7,%xmm2,%xmm2
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 128-128(%rdi),%xmm10
- xorl %r8d,%r13d
- addl 32(%rsp),%r11d
- movl %eax,%r15d
- vpsrld $3,%xmm4,%xmm7
- shrdl $11,%r14d,%r14d
- xorl %r10d,%r12d
- xorl %ebx,%r15d
- vpslld $14,%xmm4,%xmm5
- shrdl $6,%r13d,%r13d
- addl %r12d,%r11d
- andl %r15d,%esi
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%r14d
- addl %r13d,%r11d
- xorl %ebx,%esi
- vpshufd $250,%xmm1,%xmm7
- addl %r11d,%edx
- shrdl $2,%r14d,%r14d
- addl %esi,%r11d
- vpsrld $11,%xmm6,%xmm6
- movl %edx,%r13d
- addl %r11d,%r14d
- shrdl $14,%r13d,%r13d
- vpxor %xmm5,%xmm4,%xmm4
- movl %r14d,%r11d
- movl %r8d,%r12d
- xorl %edx,%r13d
- vpslld $11,%xmm5,%xmm5
- shrdl $9,%r14d,%r14d
- xorl %r9d,%r12d
- shrdl $5,%r13d,%r13d
- vpxor %xmm6,%xmm4,%xmm4
- xorl %r11d,%r14d
- andl %edx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 144-128(%rdi),%xmm10
- xorl %edx,%r13d
- vpsrld $10,%xmm7,%xmm6
- addl 36(%rsp),%r10d
- movl %r11d,%esi
- shrdl $11,%r14d,%r14d
- vpxor %xmm5,%xmm4,%xmm4
- xorl %r9d,%r12d
- xorl %eax,%esi
- shrdl $6,%r13d,%r13d
- vpsrlq $17,%xmm7,%xmm7
- addl %r12d,%r10d
- andl %esi,%r15d
- xorl %r11d,%r14d
- vpaddd %xmm4,%xmm2,%xmm2
- addl %r13d,%r10d
- xorl %eax,%r15d
- addl %r10d,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $2,%r14d,%r14d
- addl %r15d,%r10d
- movl %ecx,%r13d
- vpsrlq $2,%xmm7,%xmm7
- addl %r10d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r10d
- vpxor %xmm7,%xmm6,%xmm6
- movl %edx,%r12d
- xorl %ecx,%r13d
- shrdl $9,%r14d,%r14d
- vpshufd $132,%xmm6,%xmm6
- xorl %r8d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r10d,%r14d
- vpsrldq $8,%xmm6,%xmm6
- andl %ecx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 160-128(%rdi),%xmm10
- xorl %ecx,%r13d
- addl 40(%rsp),%r9d
- vpaddd %xmm6,%xmm2,%xmm2
- movl %r10d,%r15d
- shrdl $11,%r14d,%r14d
- xorl %r8d,%r12d
- vpshufd $80,%xmm2,%xmm7
- xorl %r11d,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%r9d
- vpsrld $10,%xmm7,%xmm6
- andl %r15d,%esi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- vpsrlq $17,%xmm7,%xmm7
- xorl %r11d,%esi
- addl %r9d,%ebx
- shrdl $2,%r14d,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- addl %esi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- vpsrlq $2,%xmm7,%xmm7
- shrdl $14,%r13d,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- vpxor %xmm7,%xmm6,%xmm6
- xorl %ebx,%r13d
- shrdl $9,%r14d,%r14d
- xorl %edx,%r12d
- vpshufd $232,%xmm6,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- vpslldq $8,%xmm6,%xmm6
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 176-128(%rdi),%xmm10
- xorl %ebx,%r13d
- addl 44(%rsp),%r8d
- movl %r9d,%esi
- vpaddd %xmm6,%xmm2,%xmm2
- shrdl $11,%r14d,%r14d
- xorl %edx,%r12d
- xorl %r10d,%esi
- vpaddd 64(%rbp),%xmm2,%xmm6
- shrdl $6,%r13d,%r13d
- addl %r12d,%r8d
- andl %esi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- addl %r8d,%eax
- shrdl $2,%r14d,%r14d
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- vmovdqa %xmm6,32(%rsp)
- vpalignr $4,%xmm3,%xmm0,%xmm4
- shrdl $14,%r13d,%r13d
- movl %r14d,%r8d
- movl %ebx,%r12d
- vpalignr $4,%xmm1,%xmm2,%xmm7
- xorl %eax,%r13d
- shrdl $9,%r14d,%r14d
- xorl %ecx,%r12d
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %r8d,%r14d
- andl %eax,%r12d
- vpaddd %xmm7,%xmm3,%xmm3
- vpand %xmm12,%xmm11,%xmm8
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 192-128(%rdi),%xmm10
- xorl %eax,%r13d
- addl 48(%rsp),%edx
- movl %r8d,%r15d
- vpsrld $3,%xmm4,%xmm7
- shrdl $11,%r14d,%r14d
- xorl %ecx,%r12d
- xorl %r9d,%r15d
- vpslld $14,%xmm4,%xmm5
- shrdl $6,%r13d,%r13d
- addl %r12d,%edx
- andl %r15d,%esi
- vpxor %xmm6,%xmm7,%xmm4
- xorl %r8d,%r14d
- addl %r13d,%edx
- xorl %r9d,%esi
- vpshufd $250,%xmm2,%xmm7
- addl %edx,%r11d
- shrdl $2,%r14d,%r14d
- addl %esi,%edx
- vpsrld $11,%xmm6,%xmm6
- movl %r11d,%r13d
- addl %edx,%r14d
- shrdl $14,%r13d,%r13d
- vpxor %xmm5,%xmm4,%xmm4
- movl %r14d,%edx
- movl %eax,%r12d
- xorl %r11d,%r13d
- vpslld $11,%xmm5,%xmm5
- shrdl $9,%r14d,%r14d
- xorl %ebx,%r12d
- shrdl $5,%r13d,%r13d
- vpxor %xmm6,%xmm4,%xmm4
- xorl %edx,%r14d
- andl %r11d,%r12d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 208-128(%rdi),%xmm10
- xorl %r11d,%r13d
- vpsrld $10,%xmm7,%xmm6
- addl 52(%rsp),%ecx
- movl %edx,%esi
- shrdl $11,%r14d,%r14d
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ebx,%r12d
- xorl %r8d,%esi
- shrdl $6,%r13d,%r13d
- vpsrlq $17,%xmm7,%xmm7
- addl %r12d,%ecx
- andl %esi,%r15d
- xorl %edx,%r14d
- vpaddd %xmm4,%xmm3,%xmm3
- addl %r13d,%ecx
- xorl %r8d,%r15d
- addl %ecx,%r10d
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $2,%r14d,%r14d
- addl %r15d,%ecx
- movl %r10d,%r13d
- vpsrlq $2,%xmm7,%xmm7
- addl %ecx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- movl %r11d,%r12d
- xorl %r10d,%r13d
- shrdl $9,%r14d,%r14d
- vpshufd $132,%xmm6,%xmm6
- xorl %eax,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ecx,%r14d
- vpsrldq $8,%xmm6,%xmm6
- andl %r10d,%r12d
- vpand %xmm13,%xmm11,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 224-128(%rdi),%xmm10
- xorl %r10d,%r13d
- addl 56(%rsp),%ebx
- vpaddd %xmm6,%xmm3,%xmm3
- movl %ecx,%r15d
- shrdl $11,%r14d,%r14d
- xorl %eax,%r12d
- vpshufd $80,%xmm3,%xmm7
- xorl %edx,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%ebx
- vpsrld $10,%xmm7,%xmm6
- andl %r15d,%esi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- vpsrlq $17,%xmm7,%xmm7
- xorl %edx,%esi
- addl %ebx,%r9d
- shrdl $2,%r14d,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- addl %esi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- vpsrlq $2,%xmm7,%xmm7
- shrdl $14,%r13d,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- vpxor %xmm7,%xmm6,%xmm6
- xorl %r9d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r11d,%r12d
- vpshufd $232,%xmm6,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- vpslldq $8,%xmm6,%xmm6
- vpor %xmm11,%xmm8,%xmm8
- vaesenclast %xmm10,%xmm9,%xmm11
- vmovdqu 0-128(%rdi),%xmm10
- xorl %r9d,%r13d
- addl 60(%rsp),%eax
- movl %ebx,%esi
- vpaddd %xmm6,%xmm3,%xmm3
- shrdl $11,%r14d,%r14d
- xorl %r11d,%r12d
- xorl %ecx,%esi
- vpaddd 96(%rbp),%xmm3,%xmm6
- shrdl $6,%r13d,%r13d
- addl %r12d,%eax
- andl %esi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- addl %eax,%r8d
- shrdl $2,%r14d,%r14d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- vmovdqa %xmm6,48(%rsp)
- movq 64+0(%rsp),%r12
- vpand %xmm14,%xmm11,%xmm11
- movq 64+8(%rsp),%r15
- vpor %xmm11,%xmm8,%xmm8
- vmovdqu %xmm8,(%r15,%r12,1)
- leaq 16(%r12),%r12
- cmpb $0,131(%rbp)
- jne .Lavx_00_47
- vmovdqu (%r12),%xmm9
- movq %r12,64+0(%rsp)
- shrdl $14,%r13d,%r13d
- movl %r14d,%eax
- movl %r9d,%r12d
- xorl %r8d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r10d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %eax,%r14d
- andl %r8d,%r12d
- vpxor %xmm10,%xmm9,%xmm9
- vmovdqu 16-128(%rdi),%xmm10
- xorl %r8d,%r13d
- addl 0(%rsp),%r11d
- movl %eax,%r15d
- shrdl $11,%r14d,%r14d
- xorl %r10d,%r12d
- xorl %ebx,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%r11d
- andl %r15d,%esi
- xorl %eax,%r14d
- addl %r13d,%r11d
- xorl %ebx,%esi
- addl %r11d,%edx
- shrdl $2,%r14d,%r14d
- addl %esi,%r11d
- movl %edx,%r13d
- addl %r11d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r11d
- movl %r8d,%r12d
- xorl %edx,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r9d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r11d,%r14d
- andl %edx,%r12d
- vpxor %xmm8,%xmm9,%xmm9
- xorl %edx,%r13d
- addl 4(%rsp),%r10d
- movl %r11d,%esi
- shrdl $11,%r14d,%r14d
- xorl %r9d,%r12d
- xorl %eax,%esi
- shrdl $6,%r13d,%r13d
- addl %r12d,%r10d
- andl %esi,%r15d
- xorl %r11d,%r14d
- addl %r13d,%r10d
- xorl %eax,%r15d
- addl %r10d,%ecx
- shrdl $2,%r14d,%r14d
- addl %r15d,%r10d
- movl %ecx,%r13d
- addl %r10d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r10d
- movl %edx,%r12d
- xorl %ecx,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r8d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r10d,%r14d
- andl %ecx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 32-128(%rdi),%xmm10
- xorl %ecx,%r13d
- addl 8(%rsp),%r9d
- movl %r10d,%r15d
- shrdl $11,%r14d,%r14d
- xorl %r8d,%r12d
- xorl %r11d,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%r9d
- andl %r15d,%esi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- xorl %r11d,%esi
- addl %r9d,%ebx
- shrdl $2,%r14d,%r14d
- addl %esi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- xorl %ebx,%r13d
- shrdl $9,%r14d,%r14d
- xorl %edx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 48-128(%rdi),%xmm10
- xorl %ebx,%r13d
- addl 12(%rsp),%r8d
- movl %r9d,%esi
- shrdl $11,%r14d,%r14d
- xorl %edx,%r12d
- xorl %r10d,%esi
- shrdl $6,%r13d,%r13d
- addl %r12d,%r8d
- andl %esi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- addl %r8d,%eax
- shrdl $2,%r14d,%r14d
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r8d
- movl %ebx,%r12d
- xorl %eax,%r13d
- shrdl $9,%r14d,%r14d
- xorl %ecx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r8d,%r14d
- andl %eax,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 64-128(%rdi),%xmm10
- xorl %eax,%r13d
- addl 16(%rsp),%edx
- movl %r8d,%r15d
- shrdl $11,%r14d,%r14d
- xorl %ecx,%r12d
- xorl %r9d,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%edx
- andl %r15d,%esi
- xorl %r8d,%r14d
- addl %r13d,%edx
- xorl %r9d,%esi
- addl %edx,%r11d
- shrdl $2,%r14d,%r14d
- addl %esi,%edx
- movl %r11d,%r13d
- addl %edx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%edx
- movl %eax,%r12d
- xorl %r11d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %ebx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %edx,%r14d
- andl %r11d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 80-128(%rdi),%xmm10
- xorl %r11d,%r13d
- addl 20(%rsp),%ecx
- movl %edx,%esi
- shrdl $11,%r14d,%r14d
- xorl %ebx,%r12d
- xorl %r8d,%esi
- shrdl $6,%r13d,%r13d
- addl %r12d,%ecx
- andl %esi,%r15d
- xorl %edx,%r14d
- addl %r13d,%ecx
- xorl %r8d,%r15d
- addl %ecx,%r10d
- shrdl $2,%r14d,%r14d
- addl %r15d,%ecx
- movl %r10d,%r13d
- addl %ecx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ecx
- movl %r11d,%r12d
- xorl %r10d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %eax,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ecx,%r14d
- andl %r10d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 96-128(%rdi),%xmm10
- xorl %r10d,%r13d
- addl 24(%rsp),%ebx
- movl %ecx,%r15d
- shrdl $11,%r14d,%r14d
- xorl %eax,%r12d
- xorl %edx,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%ebx
- andl %r15d,%esi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- xorl %edx,%esi
- addl %ebx,%r9d
- shrdl $2,%r14d,%r14d
- addl %esi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- xorl %r9d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r11d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 112-128(%rdi),%xmm10
- xorl %r9d,%r13d
- addl 28(%rsp),%eax
- movl %ebx,%esi
- shrdl $11,%r14d,%r14d
- xorl %r11d,%r12d
- xorl %ecx,%esi
- shrdl $6,%r13d,%r13d
- addl %r12d,%eax
- andl %esi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- addl %eax,%r8d
- shrdl $2,%r14d,%r14d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%eax
- movl %r9d,%r12d
- xorl %r8d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r10d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %eax,%r14d
- andl %r8d,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 128-128(%rdi),%xmm10
- xorl %r8d,%r13d
- addl 32(%rsp),%r11d
- movl %eax,%r15d
- shrdl $11,%r14d,%r14d
- xorl %r10d,%r12d
- xorl %ebx,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%r11d
- andl %r15d,%esi
- xorl %eax,%r14d
- addl %r13d,%r11d
- xorl %ebx,%esi
- addl %r11d,%edx
- shrdl $2,%r14d,%r14d
- addl %esi,%r11d
- movl %edx,%r13d
- addl %r11d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r11d
- movl %r8d,%r12d
- xorl %edx,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r9d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r11d,%r14d
- andl %edx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 144-128(%rdi),%xmm10
- xorl %edx,%r13d
- addl 36(%rsp),%r10d
- movl %r11d,%esi
- shrdl $11,%r14d,%r14d
- xorl %r9d,%r12d
- xorl %eax,%esi
- shrdl $6,%r13d,%r13d
- addl %r12d,%r10d
- andl %esi,%r15d
- xorl %r11d,%r14d
- addl %r13d,%r10d
- xorl %eax,%r15d
- addl %r10d,%ecx
- shrdl $2,%r14d,%r14d
- addl %r15d,%r10d
- movl %ecx,%r13d
- addl %r10d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r10d
- movl %edx,%r12d
- xorl %ecx,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r8d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r10d,%r14d
- andl %ecx,%r12d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 160-128(%rdi),%xmm10
- xorl %ecx,%r13d
- addl 40(%rsp),%r9d
- movl %r10d,%r15d
- shrdl $11,%r14d,%r14d
- xorl %r8d,%r12d
- xorl %r11d,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%r9d
- andl %r15d,%esi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- xorl %r11d,%esi
- addl %r9d,%ebx
- shrdl $2,%r14d,%r14d
- addl %esi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- xorl %ebx,%r13d
- shrdl $9,%r14d,%r14d
- xorl %edx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 176-128(%rdi),%xmm10
- xorl %ebx,%r13d
- addl 44(%rsp),%r8d
- movl %r9d,%esi
- shrdl $11,%r14d,%r14d
- xorl %edx,%r12d
- xorl %r10d,%esi
- shrdl $6,%r13d,%r13d
- addl %r12d,%r8d
- andl %esi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- addl %r8d,%eax
- shrdl $2,%r14d,%r14d
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r8d
- movl %ebx,%r12d
- xorl %eax,%r13d
- shrdl $9,%r14d,%r14d
- xorl %ecx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r8d,%r14d
- andl %eax,%r12d
- vpand %xmm12,%xmm11,%xmm8
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 192-128(%rdi),%xmm10
- xorl %eax,%r13d
- addl 48(%rsp),%edx
- movl %r8d,%r15d
- shrdl $11,%r14d,%r14d
- xorl %ecx,%r12d
- xorl %r9d,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%edx
- andl %r15d,%esi
- xorl %r8d,%r14d
- addl %r13d,%edx
- xorl %r9d,%esi
- addl %edx,%r11d
- shrdl $2,%r14d,%r14d
- addl %esi,%edx
- movl %r11d,%r13d
- addl %edx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%edx
- movl %eax,%r12d
- xorl %r11d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %ebx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %edx,%r14d
- andl %r11d,%r12d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 208-128(%rdi),%xmm10
- xorl %r11d,%r13d
- addl 52(%rsp),%ecx
- movl %edx,%esi
- shrdl $11,%r14d,%r14d
- xorl %ebx,%r12d
- xorl %r8d,%esi
- shrdl $6,%r13d,%r13d
- addl %r12d,%ecx
- andl %esi,%r15d
- xorl %edx,%r14d
- addl %r13d,%ecx
- xorl %r8d,%r15d
- addl %ecx,%r10d
- shrdl $2,%r14d,%r14d
- addl %r15d,%ecx
- movl %r10d,%r13d
- addl %ecx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ecx
- movl %r11d,%r12d
- xorl %r10d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %eax,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ecx,%r14d
- andl %r10d,%r12d
- vpand %xmm13,%xmm11,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 224-128(%rdi),%xmm10
- xorl %r10d,%r13d
- addl 56(%rsp),%ebx
- movl %ecx,%r15d
- shrdl $11,%r14d,%r14d
- xorl %eax,%r12d
- xorl %edx,%r15d
- shrdl $6,%r13d,%r13d
- addl %r12d,%ebx
- andl %r15d,%esi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- xorl %edx,%esi
- addl %ebx,%r9d
- shrdl $2,%r14d,%r14d
- addl %esi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- xorl %r9d,%r13d
- shrdl $9,%r14d,%r14d
- xorl %r11d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- vpor %xmm11,%xmm8,%xmm8
- vaesenclast %xmm10,%xmm9,%xmm11
- vmovdqu 0-128(%rdi),%xmm10
- xorl %r9d,%r13d
- addl 60(%rsp),%eax
- movl %ebx,%esi
- shrdl $11,%r14d,%r14d
- xorl %r11d,%r12d
- xorl %ecx,%esi
- shrdl $6,%r13d,%r13d
- addl %r12d,%eax
- andl %esi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- addl %eax,%r8d
- shrdl $2,%r14d,%r14d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- movq 64+0(%rsp),%r12
- movq 64+8(%rsp),%r13
- movq 64+40(%rsp),%r15
- movq 64+48(%rsp),%rsi
-
- vpand %xmm14,%xmm11,%xmm11
- movl %r14d,%eax
- vpor %xmm11,%xmm8,%xmm8
- vmovdqu %xmm8,(%r12,%r13,1)
- leaq 16(%r12),%r12
-
- addl 0(%r15),%eax
- addl 4(%r15),%ebx
- addl 8(%r15),%ecx
- addl 12(%r15),%edx
- addl 16(%r15),%r8d
- addl 20(%r15),%r9d
- addl 24(%r15),%r10d
- addl 28(%r15),%r11d
-
- cmpq 64+16(%rsp),%r12
-
- movl %eax,0(%r15)
- movl %ebx,4(%r15)
- movl %ecx,8(%r15)
- movl %edx,12(%r15)
- movl %r8d,16(%r15)
- movl %r9d,20(%r15)
- movl %r10d,24(%r15)
- movl %r11d,28(%r15)
- jb .Lloop_avx
-
- movq 64+32(%rsp),%r8
- movq 120(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- vmovdqu %xmm8,(%r8)
- vzeroall
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size aesni_cbc_sha256_enc_avx,.-aesni_cbc_sha256_enc_avx
-.type aesni_cbc_sha256_enc_avx2,@function
-.align 64
-aesni_cbc_sha256_enc_avx2:
-.cfi_startproc
-.Lavx2_shortcut:
- movq 8(%rsp),%r10
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- subq $576,%rsp
- andq $-1024,%rsp
- addq $448,%rsp
-
- shlq $6,%rdx
- subq %rdi,%rsi
- subq %rdi,%r10
- addq %rdi,%rdx
-
-
-
- movq %rdx,64+16(%rsp)
-
- movq %r8,64+32(%rsp)
- movq %r9,64+40(%rsp)
- movq %r10,64+48(%rsp)
- movq %rax,120(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0xf8,0x00,0x06,0x23,0x08
-.Lprologue_avx2:
- vzeroall
-
- movq %rdi,%r13
- vpinsrq $1,%rsi,%xmm15,%xmm15
- leaq 128(%rcx),%rdi
- leaq K256+544(%rip),%r12
- movl 240-128(%rdi),%r14d
- movq %r9,%r15
- movq %r10,%rsi
- vmovdqu (%r8),%xmm8
- leaq -9(%r14),%r14
-
- vmovdqa 0(%r12,%r14,8),%xmm14
- vmovdqa 16(%r12,%r14,8),%xmm13
- vmovdqa 32(%r12,%r14,8),%xmm12
-
- subq $-64,%r13
- movl 0(%r15),%eax
- leaq (%rsi,%r13,1),%r12
- movl 4(%r15),%ebx
- cmpq %rdx,%r13
- movl 8(%r15),%ecx
- cmoveq %rsp,%r12
- movl 12(%r15),%edx
- movl 16(%r15),%r8d
- movl 20(%r15),%r9d
- movl 24(%r15),%r10d
- movl 28(%r15),%r11d
- vmovdqu 0-128(%rdi),%xmm10
- jmp .Loop_avx2
-.align 16
-.Loop_avx2:
- vmovdqa K256+512(%rip),%ymm7
- vmovdqu -64+0(%rsi,%r13,1),%xmm0
- vmovdqu -64+16(%rsi,%r13,1),%xmm1
- vmovdqu -64+32(%rsi,%r13,1),%xmm2
- vmovdqu -64+48(%rsi,%r13,1),%xmm3
-
- vinserti128 $1,(%r12),%ymm0,%ymm0
- vinserti128 $1,16(%r12),%ymm1,%ymm1
- vpshufb %ymm7,%ymm0,%ymm0
- vinserti128 $1,32(%r12),%ymm2,%ymm2
- vpshufb %ymm7,%ymm1,%ymm1
- vinserti128 $1,48(%r12),%ymm3,%ymm3
-
- leaq K256(%rip),%rbp
- vpshufb %ymm7,%ymm2,%ymm2
- leaq -64(%r13),%r13
- vpaddd 0(%rbp),%ymm0,%ymm4
- vpshufb %ymm7,%ymm3,%ymm3
- vpaddd 32(%rbp),%ymm1,%ymm5
- vpaddd 64(%rbp),%ymm2,%ymm6
- vpaddd 96(%rbp),%ymm3,%ymm7
- vmovdqa %ymm4,0(%rsp)
- xorl %r14d,%r14d
- vmovdqa %ymm5,32(%rsp)
- leaq -64(%rsp),%rsp
- movl %ebx,%esi
- vmovdqa %ymm6,0(%rsp)
- xorl %ecx,%esi
- vmovdqa %ymm7,32(%rsp)
- movl %r9d,%r12d
- subq $-32*4,%rbp
- jmp .Lavx2_00_47
-
-.align 16
-.Lavx2_00_47:
- vmovdqu (%r13),%xmm9
- vpinsrq $0,%r13,%xmm15,%xmm15
- leaq -64(%rsp),%rsp
- vpalignr $4,%ymm0,%ymm1,%ymm4
- addl 0+128(%rsp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- vpalignr $4,%ymm2,%ymm3,%ymm7
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- vpsrld $7,%ymm4,%ymm6
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- vpaddd %ymm7,%ymm0,%ymm0
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- vpsrld $3,%ymm4,%ymm7
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- vpslld $14,%ymm4,%ymm5
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- vpxor %ymm6,%ymm7,%ymm4
- andl %r15d,%esi
- vpxor %xmm10,%xmm9,%xmm9
- vmovdqu 16-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ebx,%esi
- vpshufd $250,%ymm3,%ymm7
- xorl %r13d,%r14d
- leal (%r11,%rsi,1),%r11d
- movl %r8d,%r12d
- vpsrld $11,%ymm6,%ymm6
- addl 4+128(%rsp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $11,%edx,%esi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- vpslld $11,%ymm5,%ymm5
- andnl %r9d,%edx,%r12d
- xorl %esi,%r13d
- rorxl $6,%edx,%r14d
- vpxor %ymm6,%ymm4,%ymm4
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%esi
- vpsrld $10,%ymm7,%ymm6
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%esi
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- vpsrlq $17,%ymm7,%ymm7
- andl %esi,%r15d
- vpxor %xmm8,%xmm9,%xmm9
- xorl %r12d,%r14d
- xorl %eax,%r15d
- vpaddd %ymm4,%ymm0,%ymm0
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 8+128(%rsp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- vpxor %ymm7,%ymm6,%ymm6
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- vpshufd $132,%ymm6,%ymm6
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- vpsrldq $8,%ymm6,%ymm6
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- vpaddd %ymm6,%ymm0,%ymm0
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- vpshufd $80,%ymm0,%ymm7
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 32-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r11d,%esi
- vpsrld $10,%ymm7,%ymm6
- xorl %r13d,%r14d
- leal (%r9,%rsi,1),%r9d
- movl %ecx,%r12d
- vpsrlq $17,%ymm7,%ymm7
- addl 12+128(%rsp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- vpxor %ymm7,%ymm6,%ymm6
- rorxl $11,%ebx,%esi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- vpsrlq $2,%ymm7,%ymm7
- andnl %edx,%ebx,%r12d
- xorl %esi,%r13d
- rorxl $6,%ebx,%r14d
- vpxor %ymm7,%ymm6,%ymm6
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%esi
- vpshufd $232,%ymm6,%ymm6
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%esi
- vpslldq $8,%ymm6,%ymm6
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- vpaddd %ymm6,%ymm0,%ymm0
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 48-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- vpaddd 0(%rbp),%ymm0,%ymm6
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- vmovdqa %ymm6,0(%rsp)
- vpalignr $4,%ymm1,%ymm2,%ymm4
- addl 32+128(%rsp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- vpalignr $4,%ymm3,%ymm0,%ymm7
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- vpsrld $7,%ymm4,%ymm6
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- vpaddd %ymm7,%ymm1,%ymm1
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- vpsrld $3,%ymm4,%ymm7
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- vpslld $14,%ymm4,%ymm5
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- vpxor %ymm6,%ymm7,%ymm4
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 64-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r9d,%esi
- vpshufd $250,%ymm0,%ymm7
- xorl %r13d,%r14d
- leal (%rdx,%rsi,1),%edx
- movl %eax,%r12d
- vpsrld $11,%ymm6,%ymm6
- addl 36+128(%rsp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $11,%r11d,%esi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- vpslld $11,%ymm5,%ymm5
- andnl %ebx,%r11d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r11d,%r14d
- vpxor %ymm6,%ymm4,%ymm4
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%esi
- vpsrld $10,%ymm7,%ymm6
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%esi
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- vpsrlq $17,%ymm7,%ymm7
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 80-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- vpaddd %ymm4,%ymm1,%ymm1
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 40+128(%rsp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- vpxor %ymm7,%ymm6,%ymm6
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- vpshufd $132,%ymm6,%ymm6
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- vpsrldq $8,%ymm6,%ymm6
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- vpaddd %ymm6,%ymm1,%ymm1
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- vpshufd $80,%ymm1,%ymm7
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 96-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %edx,%esi
- vpsrld $10,%ymm7,%ymm6
- xorl %r13d,%r14d
- leal (%rbx,%rsi,1),%ebx
- movl %r10d,%r12d
- vpsrlq $17,%ymm7,%ymm7
- addl 44+128(%rsp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- vpxor %ymm7,%ymm6,%ymm6
- rorxl $11,%r9d,%esi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- vpsrlq $2,%ymm7,%ymm7
- andnl %r11d,%r9d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r9d,%r14d
- vpxor %ymm7,%ymm6,%ymm6
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%esi
- vpshufd $232,%ymm6,%ymm6
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%esi
- vpslldq $8,%ymm6,%ymm6
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- vpaddd %ymm6,%ymm1,%ymm1
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 112-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- vpaddd 32(%rbp),%ymm1,%ymm6
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- vmovdqa %ymm6,32(%rsp)
- leaq -64(%rsp),%rsp
- vpalignr $4,%ymm2,%ymm3,%ymm4
- addl 0+128(%rsp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- vpalignr $4,%ymm0,%ymm1,%ymm7
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- vpsrld $7,%ymm4,%ymm6
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- vpaddd %ymm7,%ymm2,%ymm2
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- vpsrld $3,%ymm4,%ymm7
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- vpslld $14,%ymm4,%ymm5
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- vpxor %ymm6,%ymm7,%ymm4
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 128-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ebx,%esi
- vpshufd $250,%ymm1,%ymm7
- xorl %r13d,%r14d
- leal (%r11,%rsi,1),%r11d
- movl %r8d,%r12d
- vpsrld $11,%ymm6,%ymm6
- addl 4+128(%rsp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $11,%edx,%esi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- vpslld $11,%ymm5,%ymm5
- andnl %r9d,%edx,%r12d
- xorl %esi,%r13d
- rorxl $6,%edx,%r14d
- vpxor %ymm6,%ymm4,%ymm4
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%esi
- vpsrld $10,%ymm7,%ymm6
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%esi
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- vpsrlq $17,%ymm7,%ymm7
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 144-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %eax,%r15d
- vpaddd %ymm4,%ymm2,%ymm2
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 8+128(%rsp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- vpxor %ymm7,%ymm6,%ymm6
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- vpshufd $132,%ymm6,%ymm6
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- vpsrldq $8,%ymm6,%ymm6
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- vpaddd %ymm6,%ymm2,%ymm2
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- vpshufd $80,%ymm2,%ymm7
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 160-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r11d,%esi
- vpsrld $10,%ymm7,%ymm6
- xorl %r13d,%r14d
- leal (%r9,%rsi,1),%r9d
- movl %ecx,%r12d
- vpsrlq $17,%ymm7,%ymm7
- addl 12+128(%rsp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- vpxor %ymm7,%ymm6,%ymm6
- rorxl $11,%ebx,%esi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- vpsrlq $2,%ymm7,%ymm7
- andnl %edx,%ebx,%r12d
- xorl %esi,%r13d
- rorxl $6,%ebx,%r14d
- vpxor %ymm7,%ymm6,%ymm6
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%esi
- vpshufd $232,%ymm6,%ymm6
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%esi
- vpslldq $8,%ymm6,%ymm6
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- vpaddd %ymm6,%ymm2,%ymm2
- andl %esi,%r15d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 176-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- vpaddd 64(%rbp),%ymm2,%ymm6
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- vmovdqa %ymm6,0(%rsp)
- vpalignr $4,%ymm3,%ymm0,%ymm4
- addl 32+128(%rsp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- vpalignr $4,%ymm1,%ymm2,%ymm7
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- vpsrld $7,%ymm4,%ymm6
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- vpaddd %ymm7,%ymm3,%ymm3
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- vpsrld $3,%ymm4,%ymm7
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- vpslld $14,%ymm4,%ymm5
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- vpxor %ymm6,%ymm7,%ymm4
- andl %r15d,%esi
- vpand %xmm12,%xmm11,%xmm8
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 192-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r9d,%esi
- vpshufd $250,%ymm2,%ymm7
- xorl %r13d,%r14d
- leal (%rdx,%rsi,1),%edx
- movl %eax,%r12d
- vpsrld $11,%ymm6,%ymm6
- addl 36+128(%rsp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $11,%r11d,%esi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- vpslld $11,%ymm5,%ymm5
- andnl %ebx,%r11d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r11d,%r14d
- vpxor %ymm6,%ymm4,%ymm4
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%esi
- vpsrld $10,%ymm7,%ymm6
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%esi
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- vpsrlq $17,%ymm7,%ymm7
- andl %esi,%r15d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 208-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- vpaddd %ymm4,%ymm3,%ymm3
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 40+128(%rsp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- vpxor %ymm7,%ymm6,%ymm6
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- vpshufd $132,%ymm6,%ymm6
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- vpsrldq $8,%ymm6,%ymm6
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- vpaddd %ymm6,%ymm3,%ymm3
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- vpshufd $80,%ymm3,%ymm7
- andl %r15d,%esi
- vpand %xmm13,%xmm11,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 224-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %edx,%esi
- vpsrld $10,%ymm7,%ymm6
- xorl %r13d,%r14d
- leal (%rbx,%rsi,1),%ebx
- movl %r10d,%r12d
- vpsrlq $17,%ymm7,%ymm7
- addl 44+128(%rsp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- vpxor %ymm7,%ymm6,%ymm6
- rorxl $11,%r9d,%esi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- vpsrlq $2,%ymm7,%ymm7
- andnl %r11d,%r9d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r9d,%r14d
- vpxor %ymm7,%ymm6,%ymm6
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%esi
- vpshufd $232,%ymm6,%ymm6
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%esi
- vpslldq $8,%ymm6,%ymm6
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- vpaddd %ymm6,%ymm3,%ymm3
- andl %esi,%r15d
- vpor %xmm11,%xmm8,%xmm8
- vaesenclast %xmm10,%xmm9,%xmm11
- vmovdqu 0-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- vpaddd 96(%rbp),%ymm3,%ymm6
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- vmovdqa %ymm6,32(%rsp)
- vmovq %xmm15,%r13
- vpextrq $1,%xmm15,%r15
- vpand %xmm14,%xmm11,%xmm11
- vpor %xmm11,%xmm8,%xmm8
- vmovdqu %xmm8,(%r15,%r13,1)
- leaq 16(%r13),%r13
- leaq 128(%rbp),%rbp
- cmpb $0,3(%rbp)
- jne .Lavx2_00_47
- vmovdqu (%r13),%xmm9
- vpinsrq $0,%r13,%xmm15,%xmm15
- addl 0+64(%rsp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- andl %r15d,%esi
- vpxor %xmm10,%xmm9,%xmm9
- vmovdqu 16-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ebx,%esi
- xorl %r13d,%r14d
- leal (%r11,%rsi,1),%r11d
- movl %r8d,%r12d
- addl 4+64(%rsp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- rorxl $11,%edx,%esi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- andnl %r9d,%edx,%r12d
- xorl %esi,%r13d
- rorxl $6,%edx,%r14d
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%esi
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%esi
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- andl %esi,%r15d
- vpxor %xmm8,%xmm9,%xmm9
- xorl %r12d,%r14d
- xorl %eax,%r15d
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- addl 8+64(%rsp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 32-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r11d,%esi
- xorl %r13d,%r14d
- leal (%r9,%rsi,1),%r9d
- movl %ecx,%r12d
- addl 12+64(%rsp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- rorxl $11,%ebx,%esi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- andnl %edx,%ebx,%r12d
- xorl %esi,%r13d
- rorxl $6,%ebx,%r14d
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%esi
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%esi
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 48-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- addl 32+64(%rsp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 64-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r9d,%esi
- xorl %r13d,%r14d
- leal (%rdx,%rsi,1),%edx
- movl %eax,%r12d
- addl 36+64(%rsp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- rorxl $11,%r11d,%esi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- andnl %ebx,%r11d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r11d,%r14d
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%esi
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%esi
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 80-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- addl 40+64(%rsp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 96-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %edx,%esi
- xorl %r13d,%r14d
- leal (%rbx,%rsi,1),%ebx
- movl %r10d,%r12d
- addl 44+64(%rsp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- rorxl $11,%r9d,%esi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- andnl %r11d,%r9d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r9d,%r14d
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%esi
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%esi
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 112-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- addl 0(%rsp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 128-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ebx,%esi
- xorl %r13d,%r14d
- leal (%r11,%rsi,1),%r11d
- movl %r8d,%r12d
- addl 4(%rsp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- rorxl $11,%edx,%esi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- andnl %r9d,%edx,%r12d
- xorl %esi,%r13d
- rorxl $6,%edx,%r14d
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%esi
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%esi
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 144-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %eax,%r15d
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- addl 8(%rsp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 160-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r11d,%esi
- xorl %r13d,%r14d
- leal (%r9,%rsi,1),%r9d
- movl %ecx,%r12d
- addl 12(%rsp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- rorxl $11,%ebx,%esi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- andnl %edx,%ebx,%r12d
- xorl %esi,%r13d
- rorxl $6,%ebx,%r14d
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%esi
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%esi
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- andl %esi,%r15d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 176-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- addl 32(%rsp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- andl %r15d,%esi
- vpand %xmm12,%xmm11,%xmm8
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 192-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r9d,%esi
- xorl %r13d,%r14d
- leal (%rdx,%rsi,1),%edx
- movl %eax,%r12d
- addl 36(%rsp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- rorxl $11,%r11d,%esi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- andnl %ebx,%r11d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r11d,%r14d
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%esi
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%esi
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- andl %esi,%r15d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 208-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- addl 40(%rsp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- andl %r15d,%esi
- vpand %xmm13,%xmm11,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 224-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %edx,%esi
- xorl %r13d,%r14d
- leal (%rbx,%rsi,1),%ebx
- movl %r10d,%r12d
- addl 44(%rsp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- rorxl $11,%r9d,%esi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- andnl %r11d,%r9d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r9d,%r14d
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%esi
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%esi
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- andl %esi,%r15d
- vpor %xmm11,%xmm8,%xmm8
- vaesenclast %xmm10,%xmm9,%xmm11
- vmovdqu 0-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- vpextrq $1,%xmm15,%r12
- vmovq %xmm15,%r13
- movq 552(%rsp),%r15
- addl %r14d,%eax
- leaq 448(%rsp),%rbp
-
- vpand %xmm14,%xmm11,%xmm11
- vpor %xmm11,%xmm8,%xmm8
- vmovdqu %xmm8,(%r12,%r13,1)
- leaq 16(%r13),%r13
-
- addl 0(%r15),%eax
- addl 4(%r15),%ebx
- addl 8(%r15),%ecx
- addl 12(%r15),%edx
- addl 16(%r15),%r8d
- addl 20(%r15),%r9d
- addl 24(%r15),%r10d
- addl 28(%r15),%r11d
-
- movl %eax,0(%r15)
- movl %ebx,4(%r15)
- movl %ecx,8(%r15)
- movl %edx,12(%r15)
- movl %r8d,16(%r15)
- movl %r9d,20(%r15)
- movl %r10d,24(%r15)
- movl %r11d,28(%r15)
-
- cmpq 80(%rbp),%r13
- je .Ldone_avx2
-
- xorl %r14d,%r14d
- movl %ebx,%esi
- movl %r9d,%r12d
- xorl %ecx,%esi
- jmp .Lower_avx2
-.align 16
-.Lower_avx2:
- vmovdqu (%r13),%xmm9
- vpinsrq $0,%r13,%xmm15,%xmm15
- addl 0+16(%rbp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- andl %r15d,%esi
- vpxor %xmm10,%xmm9,%xmm9
- vmovdqu 16-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ebx,%esi
- xorl %r13d,%r14d
- leal (%r11,%rsi,1),%r11d
- movl %r8d,%r12d
- addl 4+16(%rbp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- rorxl $11,%edx,%esi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- andnl %r9d,%edx,%r12d
- xorl %esi,%r13d
- rorxl $6,%edx,%r14d
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%esi
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%esi
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- andl %esi,%r15d
- vpxor %xmm8,%xmm9,%xmm9
- xorl %r12d,%r14d
- xorl %eax,%r15d
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- addl 8+16(%rbp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 32-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r11d,%esi
- xorl %r13d,%r14d
- leal (%r9,%rsi,1),%r9d
- movl %ecx,%r12d
- addl 12+16(%rbp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- rorxl $11,%ebx,%esi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- andnl %edx,%ebx,%r12d
- xorl %esi,%r13d
- rorxl $6,%ebx,%r14d
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%esi
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%esi
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 48-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- addl 32+16(%rbp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 64-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r9d,%esi
- xorl %r13d,%r14d
- leal (%rdx,%rsi,1),%edx
- movl %eax,%r12d
- addl 36+16(%rbp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- rorxl $11,%r11d,%esi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- andnl %ebx,%r11d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r11d,%r14d
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%esi
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%esi
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 80-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- addl 40+16(%rbp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 96-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %edx,%esi
- xorl %r13d,%r14d
- leal (%rbx,%rsi,1),%ebx
- movl %r10d,%r12d
- addl 44+16(%rbp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- rorxl $11,%r9d,%esi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- andnl %r11d,%r9d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r9d,%r14d
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%esi
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%esi
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 112-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- leaq -64(%rbp),%rbp
- addl 0+16(%rbp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 128-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ebx,%esi
- xorl %r13d,%r14d
- leal (%r11,%rsi,1),%r11d
- movl %r8d,%r12d
- addl 4+16(%rbp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- rorxl $11,%edx,%esi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- andnl %r9d,%edx,%r12d
- xorl %esi,%r13d
- rorxl $6,%edx,%r14d
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%esi
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%esi
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- andl %esi,%r15d
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 144-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %eax,%r15d
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- addl 8+16(%rbp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- andl %r15d,%esi
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 160-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r11d,%esi
- xorl %r13d,%r14d
- leal (%r9,%rsi,1),%r9d
- movl %ecx,%r12d
- addl 12+16(%rbp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- rorxl $11,%ebx,%esi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- andnl %edx,%ebx,%r12d
- xorl %esi,%r13d
- rorxl $6,%ebx,%r14d
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%esi
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%esi
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- andl %esi,%r15d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 176-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- addl 32+16(%rbp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- andl %r15d,%esi
- vpand %xmm12,%xmm11,%xmm8
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 192-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r9d,%esi
- xorl %r13d,%r14d
- leal (%rdx,%rsi,1),%edx
- movl %eax,%r12d
- addl 36+16(%rbp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- rorxl $11,%r11d,%esi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- andnl %ebx,%r11d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r11d,%r14d
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%esi
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%esi
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- andl %esi,%r15d
- vaesenclast %xmm10,%xmm9,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 208-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- addl 40+16(%rbp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- andl %r15d,%esi
- vpand %xmm13,%xmm11,%xmm11
- vaesenc %xmm10,%xmm9,%xmm9
- vmovdqu 224-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %edx,%esi
- xorl %r13d,%r14d
- leal (%rbx,%rsi,1),%ebx
- movl %r10d,%r12d
- addl 44+16(%rbp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- rorxl $11,%r9d,%esi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- andnl %r11d,%r9d,%r12d
- xorl %esi,%r13d
- rorxl $6,%r9d,%r14d
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%esi
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%esi
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- andl %esi,%r15d
- vpor %xmm11,%xmm8,%xmm8
- vaesenclast %xmm10,%xmm9,%xmm11
- vmovdqu 0-128(%rdi),%xmm10
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- vmovq %xmm15,%r13
- vpextrq $1,%xmm15,%r15
- vpand %xmm14,%xmm11,%xmm11
- vpor %xmm11,%xmm8,%xmm8
- leaq -64(%rbp),%rbp
- vmovdqu %xmm8,(%r15,%r13,1)
- leaq 16(%r13),%r13
- cmpq %rsp,%rbp
- jae .Lower_avx2
-
- movq 552(%rsp),%r15
- leaq 64(%r13),%r13
- movq 560(%rsp),%rsi
- addl %r14d,%eax
- leaq 448(%rsp),%rsp
-
- addl 0(%r15),%eax
- addl 4(%r15),%ebx
- addl 8(%r15),%ecx
- addl 12(%r15),%edx
- addl 16(%r15),%r8d
- addl 20(%r15),%r9d
- addl 24(%r15),%r10d
- leaq (%rsi,%r13,1),%r12
- addl 28(%r15),%r11d
-
- cmpq 64+16(%rsp),%r13
-
- movl %eax,0(%r15)
- cmoveq %rsp,%r12
- movl %ebx,4(%r15)
- movl %ecx,8(%r15)
- movl %edx,12(%r15)
- movl %r8d,16(%r15)
- movl %r9d,20(%r15)
- movl %r10d,24(%r15)
- movl %r11d,28(%r15)
-
- jbe .Loop_avx2
- leaq (%rsp),%rbp
-
-.Ldone_avx2:
- leaq (%rbp),%rsp
- movq 64+32(%rsp),%r8
- movq 120(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- vmovdqu %xmm8,(%r8)
- vzeroall
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx2:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size aesni_cbc_sha256_enc_avx2,.-aesni_cbc_sha256_enc_avx2
-.type aesni_cbc_sha256_enc_shaext,@function
-.align 32
-aesni_cbc_sha256_enc_shaext:
- movq 8(%rsp),%r10
- leaq K256+128(%rip),%rax
- movdqu (%r9),%xmm1
- movdqu 16(%r9),%xmm2
- movdqa 512-128(%rax),%xmm3
-
- movl 240(%rcx),%r11d
- subq %rdi,%rsi
- movups (%rcx),%xmm15
- movups (%r8),%xmm6
- movups 16(%rcx),%xmm4
- leaq 112(%rcx),%rcx
-
- pshufd $0x1b,%xmm1,%xmm0
- pshufd $0xb1,%xmm1,%xmm1
- pshufd $0x1b,%xmm2,%xmm2
- movdqa %xmm3,%xmm7
-.byte 102,15,58,15,202,8
- punpcklqdq %xmm0,%xmm2
-
- jmp .Loop_shaext
-
-.align 16
-.Loop_shaext:
- movdqu (%r10),%xmm10
- movdqu 16(%r10),%xmm11
- movdqu 32(%r10),%xmm12
-.byte 102,68,15,56,0,211
- movdqu 48(%r10),%xmm13
-
- movdqa 0-128(%rax),%xmm0
- paddd %xmm10,%xmm0
-.byte 102,68,15,56,0,219
- movdqa %xmm2,%xmm9
- movdqa %xmm1,%xmm8
- movups 0(%rdi),%xmm14
- xorps %xmm15,%xmm14
- xorps %xmm14,%xmm6
- movups -80(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movups -64(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,202
-
- movdqa 32-128(%rax),%xmm0
- paddd %xmm11,%xmm0
-.byte 102,68,15,56,0,227
- leaq 64(%r10),%r10
- movups -48(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movups -32(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,202
-
- movdqa 64-128(%rax),%xmm0
- paddd %xmm12,%xmm0
-.byte 102,68,15,56,0,235
-.byte 69,15,56,204,211
- movups -16(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm13,%xmm3
-.byte 102,65,15,58,15,220,4
- paddd %xmm3,%xmm10
- movups 0(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,202
-
- movdqa 96-128(%rax),%xmm0
- paddd %xmm13,%xmm0
-.byte 69,15,56,205,213
-.byte 69,15,56,204,220
- movups 16(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movups 32(%rcx),%xmm4
- aesenc %xmm5,%xmm6
- movdqa %xmm10,%xmm3
-.byte 102,65,15,58,15,221,4
- paddd %xmm3,%xmm11
-.byte 15,56,203,202
- movdqa 128-128(%rax),%xmm0
- paddd %xmm10,%xmm0
-.byte 69,15,56,205,218
-.byte 69,15,56,204,229
- movups 48(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm11,%xmm3
-.byte 102,65,15,58,15,218,4
- paddd %xmm3,%xmm12
- cmpl $11,%r11d
- jb .Laesenclast1
- movups 64(%rcx),%xmm4
- aesenc %xmm5,%xmm6
- movups 80(%rcx),%xmm5
- aesenc %xmm4,%xmm6
- je .Laesenclast1
- movups 96(%rcx),%xmm4
- aesenc %xmm5,%xmm6
- movups 112(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.Laesenclast1:
- aesenclast %xmm5,%xmm6
- movups 16-112(%rcx),%xmm4
- nop
-.byte 15,56,203,202
- movups 16(%rdi),%xmm14
- xorps %xmm15,%xmm14
- movups %xmm6,0(%rsi,%rdi,1)
- xorps %xmm14,%xmm6
- movups -80(%rcx),%xmm5
- aesenc %xmm4,%xmm6
- movdqa 160-128(%rax),%xmm0
- paddd %xmm11,%xmm0
-.byte 69,15,56,205,227
-.byte 69,15,56,204,234
- movups -64(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm12,%xmm3
-.byte 102,65,15,58,15,219,4
- paddd %xmm3,%xmm13
- movups -48(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,202
- movdqa 192-128(%rax),%xmm0
- paddd %xmm12,%xmm0
-.byte 69,15,56,205,236
-.byte 69,15,56,204,211
- movups -32(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm13,%xmm3
-.byte 102,65,15,58,15,220,4
- paddd %xmm3,%xmm10
- movups -16(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,202
- movdqa 224-128(%rax),%xmm0
- paddd %xmm13,%xmm0
-.byte 69,15,56,205,213
-.byte 69,15,56,204,220
- movups 0(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm10,%xmm3
-.byte 102,65,15,58,15,221,4
- paddd %xmm3,%xmm11
- movups 16(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,202
- movdqa 256-128(%rax),%xmm0
- paddd %xmm10,%xmm0
-.byte 69,15,56,205,218
-.byte 69,15,56,204,229
- movups 32(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm11,%xmm3
-.byte 102,65,15,58,15,218,4
- paddd %xmm3,%xmm12
- movups 48(%rcx),%xmm5
- aesenc %xmm4,%xmm6
- cmpl $11,%r11d
- jb .Laesenclast2
- movups 64(%rcx),%xmm4
- aesenc %xmm5,%xmm6
- movups 80(%rcx),%xmm5
- aesenc %xmm4,%xmm6
- je .Laesenclast2
- movups 96(%rcx),%xmm4
- aesenc %xmm5,%xmm6
- movups 112(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.Laesenclast2:
- aesenclast %xmm5,%xmm6
- movups 16-112(%rcx),%xmm4
- nop
-.byte 15,56,203,202
- movups 32(%rdi),%xmm14
- xorps %xmm15,%xmm14
- movups %xmm6,16(%rsi,%rdi,1)
- xorps %xmm14,%xmm6
- movups -80(%rcx),%xmm5
- aesenc %xmm4,%xmm6
- movdqa 288-128(%rax),%xmm0
- paddd %xmm11,%xmm0
-.byte 69,15,56,205,227
-.byte 69,15,56,204,234
- movups -64(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm12,%xmm3
-.byte 102,65,15,58,15,219,4
- paddd %xmm3,%xmm13
- movups -48(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,202
- movdqa 320-128(%rax),%xmm0
- paddd %xmm12,%xmm0
-.byte 69,15,56,205,236
-.byte 69,15,56,204,211
- movups -32(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm13,%xmm3
-.byte 102,65,15,58,15,220,4
- paddd %xmm3,%xmm10
- movups -16(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,202
- movdqa 352-128(%rax),%xmm0
- paddd %xmm13,%xmm0
-.byte 69,15,56,205,213
-.byte 69,15,56,204,220
- movups 0(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm10,%xmm3
-.byte 102,65,15,58,15,221,4
- paddd %xmm3,%xmm11
- movups 16(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,202
- movdqa 384-128(%rax),%xmm0
- paddd %xmm10,%xmm0
-.byte 69,15,56,205,218
-.byte 69,15,56,204,229
- movups 32(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm11,%xmm3
-.byte 102,65,15,58,15,218,4
- paddd %xmm3,%xmm12
- movups 48(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,202
- movdqa 416-128(%rax),%xmm0
- paddd %xmm11,%xmm0
-.byte 69,15,56,205,227
-.byte 69,15,56,204,234
- cmpl $11,%r11d
- jb .Laesenclast3
- movups 64(%rcx),%xmm4
- aesenc %xmm5,%xmm6
- movups 80(%rcx),%xmm5
- aesenc %xmm4,%xmm6
- je .Laesenclast3
- movups 96(%rcx),%xmm4
- aesenc %xmm5,%xmm6
- movups 112(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.Laesenclast3:
- aesenclast %xmm5,%xmm6
- movups 16-112(%rcx),%xmm4
- nop
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movdqa %xmm12,%xmm3
-.byte 102,65,15,58,15,219,4
- paddd %xmm3,%xmm13
- movups 48(%rdi),%xmm14
- xorps %xmm15,%xmm14
- movups %xmm6,32(%rsi,%rdi,1)
- xorps %xmm14,%xmm6
- movups -80(%rcx),%xmm5
- aesenc %xmm4,%xmm6
- movups -64(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,202
-
- movdqa 448-128(%rax),%xmm0
- paddd %xmm12,%xmm0
-.byte 69,15,56,205,236
- movdqa %xmm7,%xmm3
- movups -48(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movups -32(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,202
-
- movdqa 480-128(%rax),%xmm0
- paddd %xmm13,%xmm0
- movups -16(%rcx),%xmm5
- aesenc %xmm4,%xmm6
- movups 0(%rcx),%xmm4
- aesenc %xmm5,%xmm6
-.byte 15,56,203,209
- pshufd $0x0e,%xmm0,%xmm0
- movups 16(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.byte 15,56,203,202
-
- movups 32(%rcx),%xmm4
- aesenc %xmm5,%xmm6
- movups 48(%rcx),%xmm5
- aesenc %xmm4,%xmm6
- cmpl $11,%r11d
- jb .Laesenclast4
- movups 64(%rcx),%xmm4
- aesenc %xmm5,%xmm6
- movups 80(%rcx),%xmm5
- aesenc %xmm4,%xmm6
- je .Laesenclast4
- movups 96(%rcx),%xmm4
- aesenc %xmm5,%xmm6
- movups 112(%rcx),%xmm5
- aesenc %xmm4,%xmm6
-.Laesenclast4:
- aesenclast %xmm5,%xmm6
- movups 16-112(%rcx),%xmm4
- nop
-
- paddd %xmm9,%xmm2
- paddd %xmm8,%xmm1
-
- decq %rdx
- movups %xmm6,48(%rsi,%rdi,1)
- leaq 64(%rdi),%rdi
- jnz .Loop_shaext
-
- pshufd $0xb1,%xmm2,%xmm2
- pshufd $0x1b,%xmm1,%xmm3
- pshufd $0xb1,%xmm1,%xmm1
- punpckhqdq %xmm2,%xmm1
-.byte 102,15,58,15,211,8
-
- movups %xmm6,(%r8)
- movdqu %xmm1,(%r9)
- movdqu %xmm2,16(%r9)
- .byte 0xf3,0xc3
-.size aesni_cbc_sha256_enc_shaext,.-aesni_cbc_sha256_enc_shaext
diff --git a/secure/lib/libcrypto/amd64/aesni-x86_64.S b/secure/lib/libcrypto/amd64/aesni-x86_64.S
index e2ef2d6666cb5..ce3ba1266de1a 100644
--- a/secure/lib/libcrypto/amd64/aesni-x86_64.S
+++ b/secure/lib/libcrypto/amd64/aesni-x86_64.S
@@ -863,6 +863,7 @@ aesni_ecb_encrypt:
.type aesni_ccm64_encrypt_blocks,@function
.align 16
aesni_ccm64_encrypt_blocks:
+.cfi_startproc
movl 240(%rcx),%eax
movdqu (%r8),%xmm6
movdqa .Lincrement64(%rip),%xmm9
@@ -921,11 +922,13 @@ aesni_ccm64_encrypt_blocks:
pxor %xmm8,%xmm8
pxor %xmm6,%xmm6
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_ccm64_encrypt_blocks,.-aesni_ccm64_encrypt_blocks
.globl aesni_ccm64_decrypt_blocks
.type aesni_ccm64_decrypt_blocks,@function
.align 16
aesni_ccm64_decrypt_blocks:
+.cfi_startproc
movl 240(%rcx),%eax
movups (%r8),%xmm6
movdqu (%r9),%xmm3
@@ -1018,6 +1021,7 @@ aesni_ccm64_decrypt_blocks:
pxor %xmm8,%xmm8
pxor %xmm6,%xmm6
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_ccm64_decrypt_blocks,.-aesni_ccm64_decrypt_blocks
.globl aesni_ctr32_encrypt_blocks
.type aesni_ctr32_encrypt_blocks,@function
@@ -2792,6 +2796,7 @@ aesni_ocb_encrypt:
.type __ocb_encrypt6,@function
.align 32
__ocb_encrypt6:
+.cfi_startproc
pxor %xmm9,%xmm15
movdqu (%rbx,%r12,1),%xmm11
movdqa %xmm10,%xmm12
@@ -2889,11 +2894,13 @@ __ocb_encrypt6:
.byte 102,65,15,56,221,246
.byte 102,65,15,56,221,255
.byte 0xf3,0xc3
+.cfi_endproc
.size __ocb_encrypt6,.-__ocb_encrypt6
.type __ocb_encrypt4,@function
.align 32
__ocb_encrypt4:
+.cfi_startproc
pxor %xmm9,%xmm15
movdqu (%rbx,%r12,1),%xmm11
movdqa %xmm10,%xmm12
@@ -2958,11 +2965,13 @@ __ocb_encrypt4:
.byte 102,65,15,56,221,228
.byte 102,65,15,56,221,237
.byte 0xf3,0xc3
+.cfi_endproc
.size __ocb_encrypt4,.-__ocb_encrypt4
.type __ocb_encrypt1,@function
.align 32
__ocb_encrypt1:
+.cfi_startproc
pxor %xmm15,%xmm7
pxor %xmm9,%xmm7
pxor %xmm2,%xmm8
@@ -2993,6 +3002,7 @@ __ocb_encrypt1:
.byte 102,15,56,221,215
.byte 0xf3,0xc3
+.cfi_endproc
.size __ocb_encrypt1,.-__ocb_encrypt1
.globl aesni_ocb_decrypt
@@ -3235,6 +3245,7 @@ aesni_ocb_decrypt:
.type __ocb_decrypt6,@function
.align 32
__ocb_decrypt6:
+.cfi_startproc
pxor %xmm9,%xmm15
movdqu (%rbx,%r12,1),%xmm11
movdqa %xmm10,%xmm12
@@ -3326,11 +3337,13 @@ __ocb_decrypt6:
.byte 102,65,15,56,223,246
.byte 102,65,15,56,223,255
.byte 0xf3,0xc3
+.cfi_endproc
.size __ocb_decrypt6,.-__ocb_decrypt6
.type __ocb_decrypt4,@function
.align 32
__ocb_decrypt4:
+.cfi_startproc
pxor %xmm9,%xmm15
movdqu (%rbx,%r12,1),%xmm11
movdqa %xmm10,%xmm12
@@ -3391,11 +3404,13 @@ __ocb_decrypt4:
.byte 102,65,15,56,223,228
.byte 102,65,15,56,223,237
.byte 0xf3,0xc3
+.cfi_endproc
.size __ocb_decrypt4,.-__ocb_decrypt4
.type __ocb_decrypt1,@function
.align 32
__ocb_decrypt1:
+.cfi_startproc
pxor %xmm15,%xmm7
pxor %xmm9,%xmm7
pxor %xmm7,%xmm2
@@ -3425,6 +3440,7 @@ __ocb_decrypt1:
.byte 102,15,56,223,215
.byte 0xf3,0xc3
+.cfi_endproc
.size __ocb_decrypt1,.-__ocb_decrypt1
.globl aesni_cbc_encrypt
.type aesni_cbc_encrypt,@function
@@ -4363,7 +4379,6 @@ __aesni_set_encrypt_key:
addq $8,%rsp
.cfi_adjust_cfa_offset -8
.byte 0xf3,0xc3
-.cfi_endproc
.LSEH_end_set_encrypt_key:
.align 16
@@ -4434,6 +4449,7 @@ __aesni_set_encrypt_key:
shufps $170,%xmm1,%xmm1
xorps %xmm1,%xmm2
.byte 0xf3,0xc3
+.cfi_endproc
.size aesni_set_encrypt_key,.-aesni_set_encrypt_key
.size __aesni_set_encrypt_key,.-__aesni_set_encrypt_key
.align 64
diff --git a/secure/lib/libcrypto/amd64/chacha-x86_64.S b/secure/lib/libcrypto/amd64/chacha-x86_64.S
index b01c1b87d47b4..0b3d5b8b6db4f 100644
--- a/secure/lib/libcrypto/amd64/chacha-x86_64.S
+++ b/secure/lib/libcrypto/amd64/chacha-x86_64.S
@@ -331,8 +331,6 @@ ChaCha20_ssse3:
.LChaCha20_ssse3:
movq %rsp,%r9
.cfi_def_cfa_register %r9
- testl $2048,%r10d
- jnz .LChaCha20_4xop
cmpq $128,%rdx
je .LChaCha20_128
ja .LChaCha20_4x
@@ -628,9 +626,6 @@ ChaCha20_4x:
movq %rsp,%r9
.cfi_def_cfa_register %r9
movq %r10,%r11
- shrq $32,%r10
- testq $32,%r10
- jnz .LChaCha20_8x
cmpq $192,%rdx
ja .Lproceed4x
@@ -1172,1024 +1167,3 @@ ChaCha20_4x:
.byte 0xf3,0xc3
.cfi_endproc
.size ChaCha20_4x,.-ChaCha20_4x
-.type ChaCha20_4xop,@function
-.align 32
-ChaCha20_4xop:
-.cfi_startproc
-.LChaCha20_4xop:
- movq %rsp,%r9
-.cfi_def_cfa_register %r9
- subq $0x140+8,%rsp
- vzeroupper
-
- vmovdqa .Lsigma(%rip),%xmm11
- vmovdqu (%rcx),%xmm3
- vmovdqu 16(%rcx),%xmm15
- vmovdqu (%r8),%xmm7
- leaq 256(%rsp),%rcx
-
- vpshufd $0x00,%xmm11,%xmm8
- vpshufd $0x55,%xmm11,%xmm9
- vmovdqa %xmm8,64(%rsp)
- vpshufd $0xaa,%xmm11,%xmm10
- vmovdqa %xmm9,80(%rsp)
- vpshufd $0xff,%xmm11,%xmm11
- vmovdqa %xmm10,96(%rsp)
- vmovdqa %xmm11,112(%rsp)
-
- vpshufd $0x00,%xmm3,%xmm0
- vpshufd $0x55,%xmm3,%xmm1
- vmovdqa %xmm0,128-256(%rcx)
- vpshufd $0xaa,%xmm3,%xmm2
- vmovdqa %xmm1,144-256(%rcx)
- vpshufd $0xff,%xmm3,%xmm3
- vmovdqa %xmm2,160-256(%rcx)
- vmovdqa %xmm3,176-256(%rcx)
-
- vpshufd $0x00,%xmm15,%xmm12
- vpshufd $0x55,%xmm15,%xmm13
- vmovdqa %xmm12,192-256(%rcx)
- vpshufd $0xaa,%xmm15,%xmm14
- vmovdqa %xmm13,208-256(%rcx)
- vpshufd $0xff,%xmm15,%xmm15
- vmovdqa %xmm14,224-256(%rcx)
- vmovdqa %xmm15,240-256(%rcx)
-
- vpshufd $0x00,%xmm7,%xmm4
- vpshufd $0x55,%xmm7,%xmm5
- vpaddd .Linc(%rip),%xmm4,%xmm4
- vpshufd $0xaa,%xmm7,%xmm6
- vmovdqa %xmm5,272-256(%rcx)
- vpshufd $0xff,%xmm7,%xmm7
- vmovdqa %xmm6,288-256(%rcx)
- vmovdqa %xmm7,304-256(%rcx)
-
- jmp .Loop_enter4xop
-
-.align 32
-.Loop_outer4xop:
- vmovdqa 64(%rsp),%xmm8
- vmovdqa 80(%rsp),%xmm9
- vmovdqa 96(%rsp),%xmm10
- vmovdqa 112(%rsp),%xmm11
- vmovdqa 128-256(%rcx),%xmm0
- vmovdqa 144-256(%rcx),%xmm1
- vmovdqa 160-256(%rcx),%xmm2
- vmovdqa 176-256(%rcx),%xmm3
- vmovdqa 192-256(%rcx),%xmm12
- vmovdqa 208-256(%rcx),%xmm13
- vmovdqa 224-256(%rcx),%xmm14
- vmovdqa 240-256(%rcx),%xmm15
- vmovdqa 256-256(%rcx),%xmm4
- vmovdqa 272-256(%rcx),%xmm5
- vmovdqa 288-256(%rcx),%xmm6
- vmovdqa 304-256(%rcx),%xmm7
- vpaddd .Lfour(%rip),%xmm4,%xmm4
-
-.Loop_enter4xop:
- movl $10,%eax
- vmovdqa %xmm4,256-256(%rcx)
- jmp .Loop4xop
-
-.align 32
-.Loop4xop:
- vpaddd %xmm0,%xmm8,%xmm8
- vpaddd %xmm1,%xmm9,%xmm9
- vpaddd %xmm2,%xmm10,%xmm10
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor %xmm4,%xmm8,%xmm4
- vpxor %xmm5,%xmm9,%xmm5
- vpxor %xmm6,%xmm10,%xmm6
- vpxor %xmm7,%xmm11,%xmm7
-.byte 143,232,120,194,228,16
-.byte 143,232,120,194,237,16
-.byte 143,232,120,194,246,16
-.byte 143,232,120,194,255,16
- vpaddd %xmm4,%xmm12,%xmm12
- vpaddd %xmm5,%xmm13,%xmm13
- vpaddd %xmm6,%xmm14,%xmm14
- vpaddd %xmm7,%xmm15,%xmm15
- vpxor %xmm0,%xmm12,%xmm0
- vpxor %xmm1,%xmm13,%xmm1
- vpxor %xmm14,%xmm2,%xmm2
- vpxor %xmm15,%xmm3,%xmm3
-.byte 143,232,120,194,192,12
-.byte 143,232,120,194,201,12
-.byte 143,232,120,194,210,12
-.byte 143,232,120,194,219,12
- vpaddd %xmm8,%xmm0,%xmm8
- vpaddd %xmm9,%xmm1,%xmm9
- vpaddd %xmm2,%xmm10,%xmm10
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor %xmm4,%xmm8,%xmm4
- vpxor %xmm5,%xmm9,%xmm5
- vpxor %xmm6,%xmm10,%xmm6
- vpxor %xmm7,%xmm11,%xmm7
-.byte 143,232,120,194,228,8
-.byte 143,232,120,194,237,8
-.byte 143,232,120,194,246,8
-.byte 143,232,120,194,255,8
- vpaddd %xmm4,%xmm12,%xmm12
- vpaddd %xmm5,%xmm13,%xmm13
- vpaddd %xmm6,%xmm14,%xmm14
- vpaddd %xmm7,%xmm15,%xmm15
- vpxor %xmm0,%xmm12,%xmm0
- vpxor %xmm1,%xmm13,%xmm1
- vpxor %xmm14,%xmm2,%xmm2
- vpxor %xmm15,%xmm3,%xmm3
-.byte 143,232,120,194,192,7
-.byte 143,232,120,194,201,7
-.byte 143,232,120,194,210,7
-.byte 143,232,120,194,219,7
- vpaddd %xmm1,%xmm8,%xmm8
- vpaddd %xmm2,%xmm9,%xmm9
- vpaddd %xmm3,%xmm10,%xmm10
- vpaddd %xmm0,%xmm11,%xmm11
- vpxor %xmm7,%xmm8,%xmm7
- vpxor %xmm4,%xmm9,%xmm4
- vpxor %xmm5,%xmm10,%xmm5
- vpxor %xmm6,%xmm11,%xmm6
-.byte 143,232,120,194,255,16
-.byte 143,232,120,194,228,16
-.byte 143,232,120,194,237,16
-.byte 143,232,120,194,246,16
- vpaddd %xmm7,%xmm14,%xmm14
- vpaddd %xmm4,%xmm15,%xmm15
- vpaddd %xmm5,%xmm12,%xmm12
- vpaddd %xmm6,%xmm13,%xmm13
- vpxor %xmm1,%xmm14,%xmm1
- vpxor %xmm2,%xmm15,%xmm2
- vpxor %xmm12,%xmm3,%xmm3
- vpxor %xmm13,%xmm0,%xmm0
-.byte 143,232,120,194,201,12
-.byte 143,232,120,194,210,12
-.byte 143,232,120,194,219,12
-.byte 143,232,120,194,192,12
- vpaddd %xmm8,%xmm1,%xmm8
- vpaddd %xmm9,%xmm2,%xmm9
- vpaddd %xmm3,%xmm10,%xmm10
- vpaddd %xmm0,%xmm11,%xmm11
- vpxor %xmm7,%xmm8,%xmm7
- vpxor %xmm4,%xmm9,%xmm4
- vpxor %xmm5,%xmm10,%xmm5
- vpxor %xmm6,%xmm11,%xmm6
-.byte 143,232,120,194,255,8
-.byte 143,232,120,194,228,8
-.byte 143,232,120,194,237,8
-.byte 143,232,120,194,246,8
- vpaddd %xmm7,%xmm14,%xmm14
- vpaddd %xmm4,%xmm15,%xmm15
- vpaddd %xmm5,%xmm12,%xmm12
- vpaddd %xmm6,%xmm13,%xmm13
- vpxor %xmm1,%xmm14,%xmm1
- vpxor %xmm2,%xmm15,%xmm2
- vpxor %xmm12,%xmm3,%xmm3
- vpxor %xmm13,%xmm0,%xmm0
-.byte 143,232,120,194,201,7
-.byte 143,232,120,194,210,7
-.byte 143,232,120,194,219,7
-.byte 143,232,120,194,192,7
- decl %eax
- jnz .Loop4xop
-
- vpaddd 64(%rsp),%xmm8,%xmm8
- vpaddd 80(%rsp),%xmm9,%xmm9
- vpaddd 96(%rsp),%xmm10,%xmm10
- vpaddd 112(%rsp),%xmm11,%xmm11
-
- vmovdqa %xmm14,32(%rsp)
- vmovdqa %xmm15,48(%rsp)
-
- vpunpckldq %xmm9,%xmm8,%xmm14
- vpunpckldq %xmm11,%xmm10,%xmm15
- vpunpckhdq %xmm9,%xmm8,%xmm8
- vpunpckhdq %xmm11,%xmm10,%xmm10
- vpunpcklqdq %xmm15,%xmm14,%xmm9
- vpunpckhqdq %xmm15,%xmm14,%xmm14
- vpunpcklqdq %xmm10,%xmm8,%xmm11
- vpunpckhqdq %xmm10,%xmm8,%xmm8
- vpaddd 128-256(%rcx),%xmm0,%xmm0
- vpaddd 144-256(%rcx),%xmm1,%xmm1
- vpaddd 160-256(%rcx),%xmm2,%xmm2
- vpaddd 176-256(%rcx),%xmm3,%xmm3
-
- vmovdqa %xmm9,0(%rsp)
- vmovdqa %xmm14,16(%rsp)
- vmovdqa 32(%rsp),%xmm9
- vmovdqa 48(%rsp),%xmm14
-
- vpunpckldq %xmm1,%xmm0,%xmm10
- vpunpckldq %xmm3,%xmm2,%xmm15
- vpunpckhdq %xmm1,%xmm0,%xmm0
- vpunpckhdq %xmm3,%xmm2,%xmm2
- vpunpcklqdq %xmm15,%xmm10,%xmm1
- vpunpckhqdq %xmm15,%xmm10,%xmm10
- vpunpcklqdq %xmm2,%xmm0,%xmm3
- vpunpckhqdq %xmm2,%xmm0,%xmm0
- vpaddd 192-256(%rcx),%xmm12,%xmm12
- vpaddd 208-256(%rcx),%xmm13,%xmm13
- vpaddd 224-256(%rcx),%xmm9,%xmm9
- vpaddd 240-256(%rcx),%xmm14,%xmm14
-
- vpunpckldq %xmm13,%xmm12,%xmm2
- vpunpckldq %xmm14,%xmm9,%xmm15
- vpunpckhdq %xmm13,%xmm12,%xmm12
- vpunpckhdq %xmm14,%xmm9,%xmm9
- vpunpcklqdq %xmm15,%xmm2,%xmm13
- vpunpckhqdq %xmm15,%xmm2,%xmm2
- vpunpcklqdq %xmm9,%xmm12,%xmm14
- vpunpckhqdq %xmm9,%xmm12,%xmm12
- vpaddd 256-256(%rcx),%xmm4,%xmm4
- vpaddd 272-256(%rcx),%xmm5,%xmm5
- vpaddd 288-256(%rcx),%xmm6,%xmm6
- vpaddd 304-256(%rcx),%xmm7,%xmm7
-
- vpunpckldq %xmm5,%xmm4,%xmm9
- vpunpckldq %xmm7,%xmm6,%xmm15
- vpunpckhdq %xmm5,%xmm4,%xmm4
- vpunpckhdq %xmm7,%xmm6,%xmm6
- vpunpcklqdq %xmm15,%xmm9,%xmm5
- vpunpckhqdq %xmm15,%xmm9,%xmm9
- vpunpcklqdq %xmm6,%xmm4,%xmm7
- vpunpckhqdq %xmm6,%xmm4,%xmm4
- vmovdqa 0(%rsp),%xmm6
- vmovdqa 16(%rsp),%xmm15
-
- cmpq $256,%rdx
- jb .Ltail4xop
-
- vpxor 0(%rsi),%xmm6,%xmm6
- vpxor 16(%rsi),%xmm1,%xmm1
- vpxor 32(%rsi),%xmm13,%xmm13
- vpxor 48(%rsi),%xmm5,%xmm5
- vpxor 64(%rsi),%xmm15,%xmm15
- vpxor 80(%rsi),%xmm10,%xmm10
- vpxor 96(%rsi),%xmm2,%xmm2
- vpxor 112(%rsi),%xmm9,%xmm9
- leaq 128(%rsi),%rsi
- vpxor 0(%rsi),%xmm11,%xmm11
- vpxor 16(%rsi),%xmm3,%xmm3
- vpxor 32(%rsi),%xmm14,%xmm14
- vpxor 48(%rsi),%xmm7,%xmm7
- vpxor 64(%rsi),%xmm8,%xmm8
- vpxor 80(%rsi),%xmm0,%xmm0
- vpxor 96(%rsi),%xmm12,%xmm12
- vpxor 112(%rsi),%xmm4,%xmm4
- leaq 128(%rsi),%rsi
-
- vmovdqu %xmm6,0(%rdi)
- vmovdqu %xmm1,16(%rdi)
- vmovdqu %xmm13,32(%rdi)
- vmovdqu %xmm5,48(%rdi)
- vmovdqu %xmm15,64(%rdi)
- vmovdqu %xmm10,80(%rdi)
- vmovdqu %xmm2,96(%rdi)
- vmovdqu %xmm9,112(%rdi)
- leaq 128(%rdi),%rdi
- vmovdqu %xmm11,0(%rdi)
- vmovdqu %xmm3,16(%rdi)
- vmovdqu %xmm14,32(%rdi)
- vmovdqu %xmm7,48(%rdi)
- vmovdqu %xmm8,64(%rdi)
- vmovdqu %xmm0,80(%rdi)
- vmovdqu %xmm12,96(%rdi)
- vmovdqu %xmm4,112(%rdi)
- leaq 128(%rdi),%rdi
-
- subq $256,%rdx
- jnz .Loop_outer4xop
-
- jmp .Ldone4xop
-
-.align 32
-.Ltail4xop:
- cmpq $192,%rdx
- jae .L192_or_more4xop
- cmpq $128,%rdx
- jae .L128_or_more4xop
- cmpq $64,%rdx
- jae .L64_or_more4xop
-
- xorq %r10,%r10
- vmovdqa %xmm6,0(%rsp)
- vmovdqa %xmm1,16(%rsp)
- vmovdqa %xmm13,32(%rsp)
- vmovdqa %xmm5,48(%rsp)
- jmp .Loop_tail4xop
-
-.align 32
-.L64_or_more4xop:
- vpxor 0(%rsi),%xmm6,%xmm6
- vpxor 16(%rsi),%xmm1,%xmm1
- vpxor 32(%rsi),%xmm13,%xmm13
- vpxor 48(%rsi),%xmm5,%xmm5
- vmovdqu %xmm6,0(%rdi)
- vmovdqu %xmm1,16(%rdi)
- vmovdqu %xmm13,32(%rdi)
- vmovdqu %xmm5,48(%rdi)
- je .Ldone4xop
-
- leaq 64(%rsi),%rsi
- vmovdqa %xmm15,0(%rsp)
- xorq %r10,%r10
- vmovdqa %xmm10,16(%rsp)
- leaq 64(%rdi),%rdi
- vmovdqa %xmm2,32(%rsp)
- subq $64,%rdx
- vmovdqa %xmm9,48(%rsp)
- jmp .Loop_tail4xop
-
-.align 32
-.L128_or_more4xop:
- vpxor 0(%rsi),%xmm6,%xmm6
- vpxor 16(%rsi),%xmm1,%xmm1
- vpxor 32(%rsi),%xmm13,%xmm13
- vpxor 48(%rsi),%xmm5,%xmm5
- vpxor 64(%rsi),%xmm15,%xmm15
- vpxor 80(%rsi),%xmm10,%xmm10
- vpxor 96(%rsi),%xmm2,%xmm2
- vpxor 112(%rsi),%xmm9,%xmm9
-
- vmovdqu %xmm6,0(%rdi)
- vmovdqu %xmm1,16(%rdi)
- vmovdqu %xmm13,32(%rdi)
- vmovdqu %xmm5,48(%rdi)
- vmovdqu %xmm15,64(%rdi)
- vmovdqu %xmm10,80(%rdi)
- vmovdqu %xmm2,96(%rdi)
- vmovdqu %xmm9,112(%rdi)
- je .Ldone4xop
-
- leaq 128(%rsi),%rsi
- vmovdqa %xmm11,0(%rsp)
- xorq %r10,%r10
- vmovdqa %xmm3,16(%rsp)
- leaq 128(%rdi),%rdi
- vmovdqa %xmm14,32(%rsp)
- subq $128,%rdx
- vmovdqa %xmm7,48(%rsp)
- jmp .Loop_tail4xop
-
-.align 32
-.L192_or_more4xop:
- vpxor 0(%rsi),%xmm6,%xmm6
- vpxor 16(%rsi),%xmm1,%xmm1
- vpxor 32(%rsi),%xmm13,%xmm13
- vpxor 48(%rsi),%xmm5,%xmm5
- vpxor 64(%rsi),%xmm15,%xmm15
- vpxor 80(%rsi),%xmm10,%xmm10
- vpxor 96(%rsi),%xmm2,%xmm2
- vpxor 112(%rsi),%xmm9,%xmm9
- leaq 128(%rsi),%rsi
- vpxor 0(%rsi),%xmm11,%xmm11
- vpxor 16(%rsi),%xmm3,%xmm3
- vpxor 32(%rsi),%xmm14,%xmm14
- vpxor 48(%rsi),%xmm7,%xmm7
-
- vmovdqu %xmm6,0(%rdi)
- vmovdqu %xmm1,16(%rdi)
- vmovdqu %xmm13,32(%rdi)
- vmovdqu %xmm5,48(%rdi)
- vmovdqu %xmm15,64(%rdi)
- vmovdqu %xmm10,80(%rdi)
- vmovdqu %xmm2,96(%rdi)
- vmovdqu %xmm9,112(%rdi)
- leaq 128(%rdi),%rdi
- vmovdqu %xmm11,0(%rdi)
- vmovdqu %xmm3,16(%rdi)
- vmovdqu %xmm14,32(%rdi)
- vmovdqu %xmm7,48(%rdi)
- je .Ldone4xop
-
- leaq 64(%rsi),%rsi
- vmovdqa %xmm8,0(%rsp)
- xorq %r10,%r10
- vmovdqa %xmm0,16(%rsp)
- leaq 64(%rdi),%rdi
- vmovdqa %xmm12,32(%rsp)
- subq $192,%rdx
- vmovdqa %xmm4,48(%rsp)
-
-.Loop_tail4xop:
- movzbl (%rsi,%r10,1),%eax
- movzbl (%rsp,%r10,1),%ecx
- leaq 1(%r10),%r10
- xorl %ecx,%eax
- movb %al,-1(%rdi,%r10,1)
- decq %rdx
- jnz .Loop_tail4xop
-
-.Ldone4xop:
- vzeroupper
- leaq (%r9),%rsp
-.cfi_def_cfa_register %rsp
-.L4xop_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ChaCha20_4xop,.-ChaCha20_4xop
-.type ChaCha20_8x,@function
-.align 32
-ChaCha20_8x:
-.cfi_startproc
-.LChaCha20_8x:
- movq %rsp,%r9
-.cfi_def_cfa_register %r9
- subq $0x280+8,%rsp
- andq $-32,%rsp
- vzeroupper
-
-
-
-
-
-
-
-
-
-
- vbroadcasti128 .Lsigma(%rip),%ymm11
- vbroadcasti128 (%rcx),%ymm3
- vbroadcasti128 16(%rcx),%ymm15
- vbroadcasti128 (%r8),%ymm7
- leaq 256(%rsp),%rcx
- leaq 512(%rsp),%rax
- leaq .Lrot16(%rip),%r10
- leaq .Lrot24(%rip),%r11
-
- vpshufd $0x00,%ymm11,%ymm8
- vpshufd $0x55,%ymm11,%ymm9
- vmovdqa %ymm8,128-256(%rcx)
- vpshufd $0xaa,%ymm11,%ymm10
- vmovdqa %ymm9,160-256(%rcx)
- vpshufd $0xff,%ymm11,%ymm11
- vmovdqa %ymm10,192-256(%rcx)
- vmovdqa %ymm11,224-256(%rcx)
-
- vpshufd $0x00,%ymm3,%ymm0
- vpshufd $0x55,%ymm3,%ymm1
- vmovdqa %ymm0,256-256(%rcx)
- vpshufd $0xaa,%ymm3,%ymm2
- vmovdqa %ymm1,288-256(%rcx)
- vpshufd $0xff,%ymm3,%ymm3
- vmovdqa %ymm2,320-256(%rcx)
- vmovdqa %ymm3,352-256(%rcx)
-
- vpshufd $0x00,%ymm15,%ymm12
- vpshufd $0x55,%ymm15,%ymm13
- vmovdqa %ymm12,384-512(%rax)
- vpshufd $0xaa,%ymm15,%ymm14
- vmovdqa %ymm13,416-512(%rax)
- vpshufd $0xff,%ymm15,%ymm15
- vmovdqa %ymm14,448-512(%rax)
- vmovdqa %ymm15,480-512(%rax)
-
- vpshufd $0x00,%ymm7,%ymm4
- vpshufd $0x55,%ymm7,%ymm5
- vpaddd .Lincy(%rip),%ymm4,%ymm4
- vpshufd $0xaa,%ymm7,%ymm6
- vmovdqa %ymm5,544-512(%rax)
- vpshufd $0xff,%ymm7,%ymm7
- vmovdqa %ymm6,576-512(%rax)
- vmovdqa %ymm7,608-512(%rax)
-
- jmp .Loop_enter8x
-
-.align 32
-.Loop_outer8x:
- vmovdqa 128-256(%rcx),%ymm8
- vmovdqa 160-256(%rcx),%ymm9
- vmovdqa 192-256(%rcx),%ymm10
- vmovdqa 224-256(%rcx),%ymm11
- vmovdqa 256-256(%rcx),%ymm0
- vmovdqa 288-256(%rcx),%ymm1
- vmovdqa 320-256(%rcx),%ymm2
- vmovdqa 352-256(%rcx),%ymm3
- vmovdqa 384-512(%rax),%ymm12
- vmovdqa 416-512(%rax),%ymm13
- vmovdqa 448-512(%rax),%ymm14
- vmovdqa 480-512(%rax),%ymm15
- vmovdqa 512-512(%rax),%ymm4
- vmovdqa 544-512(%rax),%ymm5
- vmovdqa 576-512(%rax),%ymm6
- vmovdqa 608-512(%rax),%ymm7
- vpaddd .Leight(%rip),%ymm4,%ymm4
-
-.Loop_enter8x:
- vmovdqa %ymm14,64(%rsp)
- vmovdqa %ymm15,96(%rsp)
- vbroadcasti128 (%r10),%ymm15
- vmovdqa %ymm4,512-512(%rax)
- movl $10,%eax
- jmp .Loop8x
-
-.align 32
-.Loop8x:
- vpaddd %ymm0,%ymm8,%ymm8
- vpxor %ymm4,%ymm8,%ymm4
- vpshufb %ymm15,%ymm4,%ymm4
- vpaddd %ymm1,%ymm9,%ymm9
- vpxor %ymm5,%ymm9,%ymm5
- vpshufb %ymm15,%ymm5,%ymm5
- vpaddd %ymm4,%ymm12,%ymm12
- vpxor %ymm0,%ymm12,%ymm0
- vpslld $12,%ymm0,%ymm14
- vpsrld $20,%ymm0,%ymm0
- vpor %ymm0,%ymm14,%ymm0
- vbroadcasti128 (%r11),%ymm14
- vpaddd %ymm5,%ymm13,%ymm13
- vpxor %ymm1,%ymm13,%ymm1
- vpslld $12,%ymm1,%ymm15
- vpsrld $20,%ymm1,%ymm1
- vpor %ymm1,%ymm15,%ymm1
- vpaddd %ymm0,%ymm8,%ymm8
- vpxor %ymm4,%ymm8,%ymm4
- vpshufb %ymm14,%ymm4,%ymm4
- vpaddd %ymm1,%ymm9,%ymm9
- vpxor %ymm5,%ymm9,%ymm5
- vpshufb %ymm14,%ymm5,%ymm5
- vpaddd %ymm4,%ymm12,%ymm12
- vpxor %ymm0,%ymm12,%ymm0
- vpslld $7,%ymm0,%ymm15
- vpsrld $25,%ymm0,%ymm0
- vpor %ymm0,%ymm15,%ymm0
- vbroadcasti128 (%r10),%ymm15
- vpaddd %ymm5,%ymm13,%ymm13
- vpxor %ymm1,%ymm13,%ymm1
- vpslld $7,%ymm1,%ymm14
- vpsrld $25,%ymm1,%ymm1
- vpor %ymm1,%ymm14,%ymm1
- vmovdqa %ymm12,0(%rsp)
- vmovdqa %ymm13,32(%rsp)
- vmovdqa 64(%rsp),%ymm12
- vmovdqa 96(%rsp),%ymm13
- vpaddd %ymm2,%ymm10,%ymm10
- vpxor %ymm6,%ymm10,%ymm6
- vpshufb %ymm15,%ymm6,%ymm6
- vpaddd %ymm3,%ymm11,%ymm11
- vpxor %ymm7,%ymm11,%ymm7
- vpshufb %ymm15,%ymm7,%ymm7
- vpaddd %ymm6,%ymm12,%ymm12
- vpxor %ymm2,%ymm12,%ymm2
- vpslld $12,%ymm2,%ymm14
- vpsrld $20,%ymm2,%ymm2
- vpor %ymm2,%ymm14,%ymm2
- vbroadcasti128 (%r11),%ymm14
- vpaddd %ymm7,%ymm13,%ymm13
- vpxor %ymm3,%ymm13,%ymm3
- vpslld $12,%ymm3,%ymm15
- vpsrld $20,%ymm3,%ymm3
- vpor %ymm3,%ymm15,%ymm3
- vpaddd %ymm2,%ymm10,%ymm10
- vpxor %ymm6,%ymm10,%ymm6
- vpshufb %ymm14,%ymm6,%ymm6
- vpaddd %ymm3,%ymm11,%ymm11
- vpxor %ymm7,%ymm11,%ymm7
- vpshufb %ymm14,%ymm7,%ymm7
- vpaddd %ymm6,%ymm12,%ymm12
- vpxor %ymm2,%ymm12,%ymm2
- vpslld $7,%ymm2,%ymm15
- vpsrld $25,%ymm2,%ymm2
- vpor %ymm2,%ymm15,%ymm2
- vbroadcasti128 (%r10),%ymm15
- vpaddd %ymm7,%ymm13,%ymm13
- vpxor %ymm3,%ymm13,%ymm3
- vpslld $7,%ymm3,%ymm14
- vpsrld $25,%ymm3,%ymm3
- vpor %ymm3,%ymm14,%ymm3
- vpaddd %ymm1,%ymm8,%ymm8
- vpxor %ymm7,%ymm8,%ymm7
- vpshufb %ymm15,%ymm7,%ymm7
- vpaddd %ymm2,%ymm9,%ymm9
- vpxor %ymm4,%ymm9,%ymm4
- vpshufb %ymm15,%ymm4,%ymm4
- vpaddd %ymm7,%ymm12,%ymm12
- vpxor %ymm1,%ymm12,%ymm1
- vpslld $12,%ymm1,%ymm14
- vpsrld $20,%ymm1,%ymm1
- vpor %ymm1,%ymm14,%ymm1
- vbroadcasti128 (%r11),%ymm14
- vpaddd %ymm4,%ymm13,%ymm13
- vpxor %ymm2,%ymm13,%ymm2
- vpslld $12,%ymm2,%ymm15
- vpsrld $20,%ymm2,%ymm2
- vpor %ymm2,%ymm15,%ymm2
- vpaddd %ymm1,%ymm8,%ymm8
- vpxor %ymm7,%ymm8,%ymm7
- vpshufb %ymm14,%ymm7,%ymm7
- vpaddd %ymm2,%ymm9,%ymm9
- vpxor %ymm4,%ymm9,%ymm4
- vpshufb %ymm14,%ymm4,%ymm4
- vpaddd %ymm7,%ymm12,%ymm12
- vpxor %ymm1,%ymm12,%ymm1
- vpslld $7,%ymm1,%ymm15
- vpsrld $25,%ymm1,%ymm1
- vpor %ymm1,%ymm15,%ymm1
- vbroadcasti128 (%r10),%ymm15
- vpaddd %ymm4,%ymm13,%ymm13
- vpxor %ymm2,%ymm13,%ymm2
- vpslld $7,%ymm2,%ymm14
- vpsrld $25,%ymm2,%ymm2
- vpor %ymm2,%ymm14,%ymm2
- vmovdqa %ymm12,64(%rsp)
- vmovdqa %ymm13,96(%rsp)
- vmovdqa 0(%rsp),%ymm12
- vmovdqa 32(%rsp),%ymm13
- vpaddd %ymm3,%ymm10,%ymm10
- vpxor %ymm5,%ymm10,%ymm5
- vpshufb %ymm15,%ymm5,%ymm5
- vpaddd %ymm0,%ymm11,%ymm11
- vpxor %ymm6,%ymm11,%ymm6
- vpshufb %ymm15,%ymm6,%ymm6
- vpaddd %ymm5,%ymm12,%ymm12
- vpxor %ymm3,%ymm12,%ymm3
- vpslld $12,%ymm3,%ymm14
- vpsrld $20,%ymm3,%ymm3
- vpor %ymm3,%ymm14,%ymm3
- vbroadcasti128 (%r11),%ymm14
- vpaddd %ymm6,%ymm13,%ymm13
- vpxor %ymm0,%ymm13,%ymm0
- vpslld $12,%ymm0,%ymm15
- vpsrld $20,%ymm0,%ymm0
- vpor %ymm0,%ymm15,%ymm0
- vpaddd %ymm3,%ymm10,%ymm10
- vpxor %ymm5,%ymm10,%ymm5
- vpshufb %ymm14,%ymm5,%ymm5
- vpaddd %ymm0,%ymm11,%ymm11
- vpxor %ymm6,%ymm11,%ymm6
- vpshufb %ymm14,%ymm6,%ymm6
- vpaddd %ymm5,%ymm12,%ymm12
- vpxor %ymm3,%ymm12,%ymm3
- vpslld $7,%ymm3,%ymm15
- vpsrld $25,%ymm3,%ymm3
- vpor %ymm3,%ymm15,%ymm3
- vbroadcasti128 (%r10),%ymm15
- vpaddd %ymm6,%ymm13,%ymm13
- vpxor %ymm0,%ymm13,%ymm0
- vpslld $7,%ymm0,%ymm14
- vpsrld $25,%ymm0,%ymm0
- vpor %ymm0,%ymm14,%ymm0
- decl %eax
- jnz .Loop8x
-
- leaq 512(%rsp),%rax
- vpaddd 128-256(%rcx),%ymm8,%ymm8
- vpaddd 160-256(%rcx),%ymm9,%ymm9
- vpaddd 192-256(%rcx),%ymm10,%ymm10
- vpaddd 224-256(%rcx),%ymm11,%ymm11
-
- vpunpckldq %ymm9,%ymm8,%ymm14
- vpunpckldq %ymm11,%ymm10,%ymm15
- vpunpckhdq %ymm9,%ymm8,%ymm8
- vpunpckhdq %ymm11,%ymm10,%ymm10
- vpunpcklqdq %ymm15,%ymm14,%ymm9
- vpunpckhqdq %ymm15,%ymm14,%ymm14
- vpunpcklqdq %ymm10,%ymm8,%ymm11
- vpunpckhqdq %ymm10,%ymm8,%ymm8
- vpaddd 256-256(%rcx),%ymm0,%ymm0
- vpaddd 288-256(%rcx),%ymm1,%ymm1
- vpaddd 320-256(%rcx),%ymm2,%ymm2
- vpaddd 352-256(%rcx),%ymm3,%ymm3
-
- vpunpckldq %ymm1,%ymm0,%ymm10
- vpunpckldq %ymm3,%ymm2,%ymm15
- vpunpckhdq %ymm1,%ymm0,%ymm0
- vpunpckhdq %ymm3,%ymm2,%ymm2
- vpunpcklqdq %ymm15,%ymm10,%ymm1
- vpunpckhqdq %ymm15,%ymm10,%ymm10
- vpunpcklqdq %ymm2,%ymm0,%ymm3
- vpunpckhqdq %ymm2,%ymm0,%ymm0
- vperm2i128 $0x20,%ymm1,%ymm9,%ymm15
- vperm2i128 $0x31,%ymm1,%ymm9,%ymm1
- vperm2i128 $0x20,%ymm10,%ymm14,%ymm9
- vperm2i128 $0x31,%ymm10,%ymm14,%ymm10
- vperm2i128 $0x20,%ymm3,%ymm11,%ymm14
- vperm2i128 $0x31,%ymm3,%ymm11,%ymm3
- vperm2i128 $0x20,%ymm0,%ymm8,%ymm11
- vperm2i128 $0x31,%ymm0,%ymm8,%ymm0
- vmovdqa %ymm15,0(%rsp)
- vmovdqa %ymm9,32(%rsp)
- vmovdqa 64(%rsp),%ymm15
- vmovdqa 96(%rsp),%ymm9
-
- vpaddd 384-512(%rax),%ymm12,%ymm12
- vpaddd 416-512(%rax),%ymm13,%ymm13
- vpaddd 448-512(%rax),%ymm15,%ymm15
- vpaddd 480-512(%rax),%ymm9,%ymm9
-
- vpunpckldq %ymm13,%ymm12,%ymm2
- vpunpckldq %ymm9,%ymm15,%ymm8
- vpunpckhdq %ymm13,%ymm12,%ymm12
- vpunpckhdq %ymm9,%ymm15,%ymm15
- vpunpcklqdq %ymm8,%ymm2,%ymm13
- vpunpckhqdq %ymm8,%ymm2,%ymm2
- vpunpcklqdq %ymm15,%ymm12,%ymm9
- vpunpckhqdq %ymm15,%ymm12,%ymm12
- vpaddd 512-512(%rax),%ymm4,%ymm4
- vpaddd 544-512(%rax),%ymm5,%ymm5
- vpaddd 576-512(%rax),%ymm6,%ymm6
- vpaddd 608-512(%rax),%ymm7,%ymm7
-
- vpunpckldq %ymm5,%ymm4,%ymm15
- vpunpckldq %ymm7,%ymm6,%ymm8
- vpunpckhdq %ymm5,%ymm4,%ymm4
- vpunpckhdq %ymm7,%ymm6,%ymm6
- vpunpcklqdq %ymm8,%ymm15,%ymm5
- vpunpckhqdq %ymm8,%ymm15,%ymm15
- vpunpcklqdq %ymm6,%ymm4,%ymm7
- vpunpckhqdq %ymm6,%ymm4,%ymm4
- vperm2i128 $0x20,%ymm5,%ymm13,%ymm8
- vperm2i128 $0x31,%ymm5,%ymm13,%ymm5
- vperm2i128 $0x20,%ymm15,%ymm2,%ymm13
- vperm2i128 $0x31,%ymm15,%ymm2,%ymm15
- vperm2i128 $0x20,%ymm7,%ymm9,%ymm2
- vperm2i128 $0x31,%ymm7,%ymm9,%ymm7
- vperm2i128 $0x20,%ymm4,%ymm12,%ymm9
- vperm2i128 $0x31,%ymm4,%ymm12,%ymm4
- vmovdqa 0(%rsp),%ymm6
- vmovdqa 32(%rsp),%ymm12
-
- cmpq $512,%rdx
- jb .Ltail8x
-
- vpxor 0(%rsi),%ymm6,%ymm6
- vpxor 32(%rsi),%ymm8,%ymm8
- vpxor 64(%rsi),%ymm1,%ymm1
- vpxor 96(%rsi),%ymm5,%ymm5
- leaq 128(%rsi),%rsi
- vmovdqu %ymm6,0(%rdi)
- vmovdqu %ymm8,32(%rdi)
- vmovdqu %ymm1,64(%rdi)
- vmovdqu %ymm5,96(%rdi)
- leaq 128(%rdi),%rdi
-
- vpxor 0(%rsi),%ymm12,%ymm12
- vpxor 32(%rsi),%ymm13,%ymm13
- vpxor 64(%rsi),%ymm10,%ymm10
- vpxor 96(%rsi),%ymm15,%ymm15
- leaq 128(%rsi),%rsi
- vmovdqu %ymm12,0(%rdi)
- vmovdqu %ymm13,32(%rdi)
- vmovdqu %ymm10,64(%rdi)
- vmovdqu %ymm15,96(%rdi)
- leaq 128(%rdi),%rdi
-
- vpxor 0(%rsi),%ymm14,%ymm14
- vpxor 32(%rsi),%ymm2,%ymm2
- vpxor 64(%rsi),%ymm3,%ymm3
- vpxor 96(%rsi),%ymm7,%ymm7
- leaq 128(%rsi),%rsi
- vmovdqu %ymm14,0(%rdi)
- vmovdqu %ymm2,32(%rdi)
- vmovdqu %ymm3,64(%rdi)
- vmovdqu %ymm7,96(%rdi)
- leaq 128(%rdi),%rdi
-
- vpxor 0(%rsi),%ymm11,%ymm11
- vpxor 32(%rsi),%ymm9,%ymm9
- vpxor 64(%rsi),%ymm0,%ymm0
- vpxor 96(%rsi),%ymm4,%ymm4
- leaq 128(%rsi),%rsi
- vmovdqu %ymm11,0(%rdi)
- vmovdqu %ymm9,32(%rdi)
- vmovdqu %ymm0,64(%rdi)
- vmovdqu %ymm4,96(%rdi)
- leaq 128(%rdi),%rdi
-
- subq $512,%rdx
- jnz .Loop_outer8x
-
- jmp .Ldone8x
-
-.Ltail8x:
- cmpq $448,%rdx
- jae .L448_or_more8x
- cmpq $384,%rdx
- jae .L384_or_more8x
- cmpq $320,%rdx
- jae .L320_or_more8x
- cmpq $256,%rdx
- jae .L256_or_more8x
- cmpq $192,%rdx
- jae .L192_or_more8x
- cmpq $128,%rdx
- jae .L128_or_more8x
- cmpq $64,%rdx
- jae .L64_or_more8x
-
- xorq %r10,%r10
- vmovdqa %ymm6,0(%rsp)
- vmovdqa %ymm8,32(%rsp)
- jmp .Loop_tail8x
-
-.align 32
-.L64_or_more8x:
- vpxor 0(%rsi),%ymm6,%ymm6
- vpxor 32(%rsi),%ymm8,%ymm8
- vmovdqu %ymm6,0(%rdi)
- vmovdqu %ymm8,32(%rdi)
- je .Ldone8x
-
- leaq 64(%rsi),%rsi
- xorq %r10,%r10
- vmovdqa %ymm1,0(%rsp)
- leaq 64(%rdi),%rdi
- subq $64,%rdx
- vmovdqa %ymm5,32(%rsp)
- jmp .Loop_tail8x
-
-.align 32
-.L128_or_more8x:
- vpxor 0(%rsi),%ymm6,%ymm6
- vpxor 32(%rsi),%ymm8,%ymm8
- vpxor 64(%rsi),%ymm1,%ymm1
- vpxor 96(%rsi),%ymm5,%ymm5
- vmovdqu %ymm6,0(%rdi)
- vmovdqu %ymm8,32(%rdi)
- vmovdqu %ymm1,64(%rdi)
- vmovdqu %ymm5,96(%rdi)
- je .Ldone8x
-
- leaq 128(%rsi),%rsi
- xorq %r10,%r10
- vmovdqa %ymm12,0(%rsp)
- leaq 128(%rdi),%rdi
- subq $128,%rdx
- vmovdqa %ymm13,32(%rsp)
- jmp .Loop_tail8x
-
-.align 32
-.L192_or_more8x:
- vpxor 0(%rsi),%ymm6,%ymm6
- vpxor 32(%rsi),%ymm8,%ymm8
- vpxor 64(%rsi),%ymm1,%ymm1
- vpxor 96(%rsi),%ymm5,%ymm5
- vpxor 128(%rsi),%ymm12,%ymm12
- vpxor 160(%rsi),%ymm13,%ymm13
- vmovdqu %ymm6,0(%rdi)
- vmovdqu %ymm8,32(%rdi)
- vmovdqu %ymm1,64(%rdi)
- vmovdqu %ymm5,96(%rdi)
- vmovdqu %ymm12,128(%rdi)
- vmovdqu %ymm13,160(%rdi)
- je .Ldone8x
-
- leaq 192(%rsi),%rsi
- xorq %r10,%r10
- vmovdqa %ymm10,0(%rsp)
- leaq 192(%rdi),%rdi
- subq $192,%rdx
- vmovdqa %ymm15,32(%rsp)
- jmp .Loop_tail8x
-
-.align 32
-.L256_or_more8x:
- vpxor 0(%rsi),%ymm6,%ymm6
- vpxor 32(%rsi),%ymm8,%ymm8
- vpxor 64(%rsi),%ymm1,%ymm1
- vpxor 96(%rsi),%ymm5,%ymm5
- vpxor 128(%rsi),%ymm12,%ymm12
- vpxor 160(%rsi),%ymm13,%ymm13
- vpxor 192(%rsi),%ymm10,%ymm10
- vpxor 224(%rsi),%ymm15,%ymm15
- vmovdqu %ymm6,0(%rdi)
- vmovdqu %ymm8,32(%rdi)
- vmovdqu %ymm1,64(%rdi)
- vmovdqu %ymm5,96(%rdi)
- vmovdqu %ymm12,128(%rdi)
- vmovdqu %ymm13,160(%rdi)
- vmovdqu %ymm10,192(%rdi)
- vmovdqu %ymm15,224(%rdi)
- je .Ldone8x
-
- leaq 256(%rsi),%rsi
- xorq %r10,%r10
- vmovdqa %ymm14,0(%rsp)
- leaq 256(%rdi),%rdi
- subq $256,%rdx
- vmovdqa %ymm2,32(%rsp)
- jmp .Loop_tail8x
-
-.align 32
-.L320_or_more8x:
- vpxor 0(%rsi),%ymm6,%ymm6
- vpxor 32(%rsi),%ymm8,%ymm8
- vpxor 64(%rsi),%ymm1,%ymm1
- vpxor 96(%rsi),%ymm5,%ymm5
- vpxor 128(%rsi),%ymm12,%ymm12
- vpxor 160(%rsi),%ymm13,%ymm13
- vpxor 192(%rsi),%ymm10,%ymm10
- vpxor 224(%rsi),%ymm15,%ymm15
- vpxor 256(%rsi),%ymm14,%ymm14
- vpxor 288(%rsi),%ymm2,%ymm2
- vmovdqu %ymm6,0(%rdi)
- vmovdqu %ymm8,32(%rdi)
- vmovdqu %ymm1,64(%rdi)
- vmovdqu %ymm5,96(%rdi)
- vmovdqu %ymm12,128(%rdi)
- vmovdqu %ymm13,160(%rdi)
- vmovdqu %ymm10,192(%rdi)
- vmovdqu %ymm15,224(%rdi)
- vmovdqu %ymm14,256(%rdi)
- vmovdqu %ymm2,288(%rdi)
- je .Ldone8x
-
- leaq 320(%rsi),%rsi
- xorq %r10,%r10
- vmovdqa %ymm3,0(%rsp)
- leaq 320(%rdi),%rdi
- subq $320,%rdx
- vmovdqa %ymm7,32(%rsp)
- jmp .Loop_tail8x
-
-.align 32
-.L384_or_more8x:
- vpxor 0(%rsi),%ymm6,%ymm6
- vpxor 32(%rsi),%ymm8,%ymm8
- vpxor 64(%rsi),%ymm1,%ymm1
- vpxor 96(%rsi),%ymm5,%ymm5
- vpxor 128(%rsi),%ymm12,%ymm12
- vpxor 160(%rsi),%ymm13,%ymm13
- vpxor 192(%rsi),%ymm10,%ymm10
- vpxor 224(%rsi),%ymm15,%ymm15
- vpxor 256(%rsi),%ymm14,%ymm14
- vpxor 288(%rsi),%ymm2,%ymm2
- vpxor 320(%rsi),%ymm3,%ymm3
- vpxor 352(%rsi),%ymm7,%ymm7
- vmovdqu %ymm6,0(%rdi)
- vmovdqu %ymm8,32(%rdi)
- vmovdqu %ymm1,64(%rdi)
- vmovdqu %ymm5,96(%rdi)
- vmovdqu %ymm12,128(%rdi)
- vmovdqu %ymm13,160(%rdi)
- vmovdqu %ymm10,192(%rdi)
- vmovdqu %ymm15,224(%rdi)
- vmovdqu %ymm14,256(%rdi)
- vmovdqu %ymm2,288(%rdi)
- vmovdqu %ymm3,320(%rdi)
- vmovdqu %ymm7,352(%rdi)
- je .Ldone8x
-
- leaq 384(%rsi),%rsi
- xorq %r10,%r10
- vmovdqa %ymm11,0(%rsp)
- leaq 384(%rdi),%rdi
- subq $384,%rdx
- vmovdqa %ymm9,32(%rsp)
- jmp .Loop_tail8x
-
-.align 32
-.L448_or_more8x:
- vpxor 0(%rsi),%ymm6,%ymm6
- vpxor 32(%rsi),%ymm8,%ymm8
- vpxor 64(%rsi),%ymm1,%ymm1
- vpxor 96(%rsi),%ymm5,%ymm5
- vpxor 128(%rsi),%ymm12,%ymm12
- vpxor 160(%rsi),%ymm13,%ymm13
- vpxor 192(%rsi),%ymm10,%ymm10
- vpxor 224(%rsi),%ymm15,%ymm15
- vpxor 256(%rsi),%ymm14,%ymm14
- vpxor 288(%rsi),%ymm2,%ymm2
- vpxor 320(%rsi),%ymm3,%ymm3
- vpxor 352(%rsi),%ymm7,%ymm7
- vpxor 384(%rsi),%ymm11,%ymm11
- vpxor 416(%rsi),%ymm9,%ymm9
- vmovdqu %ymm6,0(%rdi)
- vmovdqu %ymm8,32(%rdi)
- vmovdqu %ymm1,64(%rdi)
- vmovdqu %ymm5,96(%rdi)
- vmovdqu %ymm12,128(%rdi)
- vmovdqu %ymm13,160(%rdi)
- vmovdqu %ymm10,192(%rdi)
- vmovdqu %ymm15,224(%rdi)
- vmovdqu %ymm14,256(%rdi)
- vmovdqu %ymm2,288(%rdi)
- vmovdqu %ymm3,320(%rdi)
- vmovdqu %ymm7,352(%rdi)
- vmovdqu %ymm11,384(%rdi)
- vmovdqu %ymm9,416(%rdi)
- je .Ldone8x
-
- leaq 448(%rsi),%rsi
- xorq %r10,%r10
- vmovdqa %ymm0,0(%rsp)
- leaq 448(%rdi),%rdi
- subq $448,%rdx
- vmovdqa %ymm4,32(%rsp)
-
-.Loop_tail8x:
- movzbl (%rsi,%r10,1),%eax
- movzbl (%rsp,%r10,1),%ecx
- leaq 1(%r10),%r10
- xorl %ecx,%eax
- movb %al,-1(%rdi,%r10,1)
- decq %rdx
- jnz .Loop_tail8x
-
-.Ldone8x:
- vzeroall
- leaq (%r9),%rsp
-.cfi_def_cfa_register %rsp
-.L8x_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ChaCha20_8x,.-ChaCha20_8x
diff --git a/secure/lib/libcrypto/amd64/cmll-x86_64.S b/secure/lib/libcrypto/amd64/cmll-x86_64.S
index 7feb198a7c1cc..d1d284b5a32ba 100644
--- a/secure/lib/libcrypto/amd64/cmll-x86_64.S
+++ b/secure/lib/libcrypto/amd64/cmll-x86_64.S
@@ -7,11 +7,13 @@
.type Camellia_EncryptBlock,@function
.align 16
Camellia_EncryptBlock:
+.cfi_startproc
movl $128,%eax
subl %edi,%eax
movl $3,%edi
adcl $0,%edi
jmp .Lenc_rounds
+.cfi_endproc
.size Camellia_EncryptBlock,.-Camellia_EncryptBlock
.globl Camellia_EncryptBlock_Rounds
@@ -85,6 +87,7 @@ Camellia_EncryptBlock_Rounds:
.type _x86_64_Camellia_encrypt,@function
.align 16
_x86_64_Camellia_encrypt:
+.cfi_startproc
xorl 0(%r14),%r9d
xorl 4(%r14),%r8d
xorl 8(%r14),%r11d
@@ -287,6 +290,7 @@ _x86_64_Camellia_encrypt:
movl %edx,%r11d
.byte 0xf3,0xc3
+.cfi_endproc
.size _x86_64_Camellia_encrypt,.-_x86_64_Camellia_encrypt
@@ -294,11 +298,13 @@ _x86_64_Camellia_encrypt:
.type Camellia_DecryptBlock,@function
.align 16
Camellia_DecryptBlock:
+.cfi_startproc
movl $128,%eax
subl %edi,%eax
movl $3,%edi
adcl $0,%edi
jmp .Ldec_rounds
+.cfi_endproc
.size Camellia_DecryptBlock,.-Camellia_DecryptBlock
.globl Camellia_DecryptBlock_Rounds
@@ -372,6 +378,7 @@ Camellia_DecryptBlock_Rounds:
.type _x86_64_Camellia_decrypt,@function
.align 16
_x86_64_Camellia_decrypt:
+.cfi_startproc
xorl 0(%r14),%r9d
xorl 4(%r14),%r8d
xorl 8(%r14),%r11d
@@ -575,6 +582,7 @@ _x86_64_Camellia_decrypt:
movl %ebx,%r11d
.byte 0xf3,0xc3
+.cfi_endproc
.size _x86_64_Camellia_decrypt,.-_x86_64_Camellia_decrypt
.globl Camellia_Ekeygen
.type Camellia_Ekeygen,@function
diff --git a/secure/lib/libcrypto/amd64/ecp_nistz256-x86_64.S b/secure/lib/libcrypto/amd64/ecp_nistz256-x86_64.S
index 1176feea40c24..c69b4d978f390 100644
--- a/secure/lib/libcrypto/amd64/ecp_nistz256-x86_64.S
+++ b/secure/lib/libcrypto/amd64/ecp_nistz256-x86_64.S
@@ -2790,10 +2790,6 @@ ecp_nistz256_neg:
.align 32
ecp_nistz256_ord_mul_mont:
.cfi_startproc
- movl $0x80100,%ecx
- andl OPENSSL_ia32cap_P+8(%rip),%ecx
- cmpl $0x80100,%ecx
- je .Lecp_nistz256_ord_mul_montx
pushq %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-16
@@ -3122,10 +3118,6 @@ ecp_nistz256_ord_mul_mont:
.align 32
ecp_nistz256_ord_sqr_mont:
.cfi_startproc
- movl $0x80100,%ecx
- andl OPENSSL_ia32cap_P+8(%rip),%ecx
- cmpl $0x80100,%ecx
- je .Lecp_nistz256_ord_sqr_montx
pushq %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-16
@@ -3413,462 +3405,6 @@ ecp_nistz256_ord_sqr_mont:
.cfi_endproc
.size ecp_nistz256_ord_sqr_mont,.-ecp_nistz256_ord_sqr_mont
-.type ecp_nistz256_ord_mul_montx,@function
-.align 32
-ecp_nistz256_ord_mul_montx:
-.cfi_startproc
-.Lecp_nistz256_ord_mul_montx:
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-16
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
-.Lord_mulx_body:
-
- movq %rdx,%rbx
- movq 0(%rdx),%rdx
- movq 0(%rsi),%r9
- movq 8(%rsi),%r10
- movq 16(%rsi),%r11
- movq 24(%rsi),%r12
- leaq -128(%rsi),%rsi
- leaq .Lord-128(%rip),%r14
- movq .LordK(%rip),%r15
-
-
- mulxq %r9,%r8,%r9
- mulxq %r10,%rcx,%r10
- mulxq %r11,%rbp,%r11
- addq %rcx,%r9
- mulxq %r12,%rcx,%r12
- movq %r8,%rdx
- mulxq %r15,%rdx,%rax
- adcq %rbp,%r10
- adcq %rcx,%r11
- adcq $0,%r12
-
-
- xorq %r13,%r13
- mulxq 0+128(%r14),%rcx,%rbp
- adcxq %rcx,%r8
- adoxq %rbp,%r9
-
- mulxq 8+128(%r14),%rcx,%rbp
- adcxq %rcx,%r9
- adoxq %rbp,%r10
-
- mulxq 16+128(%r14),%rcx,%rbp
- adcxq %rcx,%r10
- adoxq %rbp,%r11
-
- mulxq 24+128(%r14),%rcx,%rbp
- movq 8(%rbx),%rdx
- adcxq %rcx,%r11
- adoxq %rbp,%r12
- adcxq %r8,%r12
- adoxq %r8,%r13
- adcq $0,%r13
-
-
- mulxq 0+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r9
- adoxq %rbp,%r10
-
- mulxq 8+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r10
- adoxq %rbp,%r11
-
- mulxq 16+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq 24+128(%rsi),%rcx,%rbp
- movq %r9,%rdx
- mulxq %r15,%rdx,%rax
- adcxq %rcx,%r12
- adoxq %rbp,%r13
-
- adcxq %r8,%r13
- adoxq %r8,%r8
- adcq $0,%r8
-
-
- mulxq 0+128(%r14),%rcx,%rbp
- adcxq %rcx,%r9
- adoxq %rbp,%r10
-
- mulxq 8+128(%r14),%rcx,%rbp
- adcxq %rcx,%r10
- adoxq %rbp,%r11
-
- mulxq 16+128(%r14),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq 24+128(%r14),%rcx,%rbp
- movq 16(%rbx),%rdx
- adcxq %rcx,%r12
- adoxq %rbp,%r13
- adcxq %r9,%r13
- adoxq %r9,%r8
- adcq $0,%r8
-
-
- mulxq 0+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r10
- adoxq %rbp,%r11
-
- mulxq 8+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq 16+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r12
- adoxq %rbp,%r13
-
- mulxq 24+128(%rsi),%rcx,%rbp
- movq %r10,%rdx
- mulxq %r15,%rdx,%rax
- adcxq %rcx,%r13
- adoxq %rbp,%r8
-
- adcxq %r9,%r8
- adoxq %r9,%r9
- adcq $0,%r9
-
-
- mulxq 0+128(%r14),%rcx,%rbp
- adcxq %rcx,%r10
- adoxq %rbp,%r11
-
- mulxq 8+128(%r14),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq 16+128(%r14),%rcx,%rbp
- adcxq %rcx,%r12
- adoxq %rbp,%r13
-
- mulxq 24+128(%r14),%rcx,%rbp
- movq 24(%rbx),%rdx
- adcxq %rcx,%r13
- adoxq %rbp,%r8
- adcxq %r10,%r8
- adoxq %r10,%r9
- adcq $0,%r9
-
-
- mulxq 0+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq 8+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r12
- adoxq %rbp,%r13
-
- mulxq 16+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r13
- adoxq %rbp,%r8
-
- mulxq 24+128(%rsi),%rcx,%rbp
- movq %r11,%rdx
- mulxq %r15,%rdx,%rax
- adcxq %rcx,%r8
- adoxq %rbp,%r9
-
- adcxq %r10,%r9
- adoxq %r10,%r10
- adcq $0,%r10
-
-
- mulxq 0+128(%r14),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq 8+128(%r14),%rcx,%rbp
- adcxq %rcx,%r12
- adoxq %rbp,%r13
-
- mulxq 16+128(%r14),%rcx,%rbp
- adcxq %rcx,%r13
- adoxq %rbp,%r8
-
- mulxq 24+128(%r14),%rcx,%rbp
- leaq 128(%r14),%r14
- movq %r12,%rbx
- adcxq %rcx,%r8
- adoxq %rbp,%r9
- movq %r13,%rdx
- adcxq %r11,%r9
- adoxq %r11,%r10
- adcq $0,%r10
-
-
-
- movq %r8,%rcx
- subq 0(%r14),%r12
- sbbq 8(%r14),%r13
- sbbq 16(%r14),%r8
- movq %r9,%rbp
- sbbq 24(%r14),%r9
- sbbq $0,%r10
-
- cmovcq %rbx,%r12
- cmovcq %rdx,%r13
- cmovcq %rcx,%r8
- cmovcq %rbp,%r9
-
- movq %r12,0(%rdi)
- movq %r13,8(%rdi)
- movq %r8,16(%rdi)
- movq %r9,24(%rdi)
-
- movq 0(%rsp),%r15
-.cfi_restore %r15
- movq 8(%rsp),%r14
-.cfi_restore %r14
- movq 16(%rsp),%r13
-.cfi_restore %r13
- movq 24(%rsp),%r12
-.cfi_restore %r12
- movq 32(%rsp),%rbx
-.cfi_restore %rbx
- movq 40(%rsp),%rbp
-.cfi_restore %rbp
- leaq 48(%rsp),%rsp
-.cfi_adjust_cfa_offset -48
-.Lord_mulx_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ecp_nistz256_ord_mul_montx,.-ecp_nistz256_ord_mul_montx
-
-.type ecp_nistz256_ord_sqr_montx,@function
-.align 32
-ecp_nistz256_ord_sqr_montx:
-.cfi_startproc
-.Lecp_nistz256_ord_sqr_montx:
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-16
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
-.Lord_sqrx_body:
-
- movq %rdx,%rbx
- movq 0(%rsi),%rdx
- movq 8(%rsi),%r14
- movq 16(%rsi),%r15
- movq 24(%rsi),%r8
- leaq .Lord(%rip),%rsi
- jmp .Loop_ord_sqrx
-
-.align 32
-.Loop_ord_sqrx:
- mulxq %r14,%r9,%r10
- mulxq %r15,%rcx,%r11
- movq %rdx,%rax
-.byte 102,73,15,110,206
- mulxq %r8,%rbp,%r12
- movq %r14,%rdx
- addq %rcx,%r10
-.byte 102,73,15,110,215
- adcq %rbp,%r11
- adcq $0,%r12
- xorq %r13,%r13
-
- mulxq %r15,%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq %r8,%rcx,%rbp
- movq %r15,%rdx
- adcxq %rcx,%r12
- adoxq %rbp,%r13
- adcq $0,%r13
-
- mulxq %r8,%rcx,%r14
- movq %rax,%rdx
-.byte 102,73,15,110,216
- xorq %r15,%r15
- adcxq %r9,%r9
- adoxq %rcx,%r13
- adcxq %r10,%r10
- adoxq %r15,%r14
-
-
- mulxq %rdx,%r8,%rbp
-.byte 102,72,15,126,202
- adcxq %r11,%r11
- adoxq %rbp,%r9
- adcxq %r12,%r12
- mulxq %rdx,%rcx,%rax
-.byte 102,72,15,126,210
- adcxq %r13,%r13
- adoxq %rcx,%r10
- adcxq %r14,%r14
- mulxq %rdx,%rcx,%rbp
-.byte 0x67
-.byte 102,72,15,126,218
- adoxq %rax,%r11
- adcxq %r15,%r15
- adoxq %rcx,%r12
- adoxq %rbp,%r13
- mulxq %rdx,%rcx,%rax
- adoxq %rcx,%r14
- adoxq %rax,%r15
-
-
- movq %r8,%rdx
- mulxq 32(%rsi),%rdx,%rcx
-
- xorq %rax,%rax
- mulxq 0(%rsi),%rcx,%rbp
- adcxq %rcx,%r8
- adoxq %rbp,%r9
- mulxq 8(%rsi),%rcx,%rbp
- adcxq %rcx,%r9
- adoxq %rbp,%r10
- mulxq 16(%rsi),%rcx,%rbp
- adcxq %rcx,%r10
- adoxq %rbp,%r11
- mulxq 24(%rsi),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r8
- adcxq %rax,%r8
-
-
- movq %r9,%rdx
- mulxq 32(%rsi),%rdx,%rcx
-
- mulxq 0(%rsi),%rcx,%rbp
- adoxq %rcx,%r9
- adcxq %rbp,%r10
- mulxq 8(%rsi),%rcx,%rbp
- adoxq %rcx,%r10
- adcxq %rbp,%r11
- mulxq 16(%rsi),%rcx,%rbp
- adoxq %rcx,%r11
- adcxq %rbp,%r8
- mulxq 24(%rsi),%rcx,%rbp
- adoxq %rcx,%r8
- adcxq %rbp,%r9
- adoxq %rax,%r9
-
-
- movq %r10,%rdx
- mulxq 32(%rsi),%rdx,%rcx
-
- mulxq 0(%rsi),%rcx,%rbp
- adcxq %rcx,%r10
- adoxq %rbp,%r11
- mulxq 8(%rsi),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r8
- mulxq 16(%rsi),%rcx,%rbp
- adcxq %rcx,%r8
- adoxq %rbp,%r9
- mulxq 24(%rsi),%rcx,%rbp
- adcxq %rcx,%r9
- adoxq %rbp,%r10
- adcxq %rax,%r10
-
-
- movq %r11,%rdx
- mulxq 32(%rsi),%rdx,%rcx
-
- mulxq 0(%rsi),%rcx,%rbp
- adoxq %rcx,%r11
- adcxq %rbp,%r8
- mulxq 8(%rsi),%rcx,%rbp
- adoxq %rcx,%r8
- adcxq %rbp,%r9
- mulxq 16(%rsi),%rcx,%rbp
- adoxq %rcx,%r9
- adcxq %rbp,%r10
- mulxq 24(%rsi),%rcx,%rbp
- adoxq %rcx,%r10
- adcxq %rbp,%r11
- adoxq %rax,%r11
-
-
- addq %r8,%r12
- adcq %r13,%r9
- movq %r12,%rdx
- adcq %r14,%r10
- adcq %r15,%r11
- movq %r9,%r14
- adcq $0,%rax
-
-
- subq 0(%rsi),%r12
- movq %r10,%r15
- sbbq 8(%rsi),%r9
- sbbq 16(%rsi),%r10
- movq %r11,%r8
- sbbq 24(%rsi),%r11
- sbbq $0,%rax
-
- cmovncq %r12,%rdx
- cmovncq %r9,%r14
- cmovncq %r10,%r15
- cmovncq %r11,%r8
-
- decq %rbx
- jnz .Loop_ord_sqrx
-
- movq %rdx,0(%rdi)
- movq %r14,8(%rdi)
- pxor %xmm1,%xmm1
- movq %r15,16(%rdi)
- pxor %xmm2,%xmm2
- movq %r8,24(%rdi)
- pxor %xmm3,%xmm3
-
- movq 0(%rsp),%r15
-.cfi_restore %r15
- movq 8(%rsp),%r14
-.cfi_restore %r14
- movq 16(%rsp),%r13
-.cfi_restore %r13
- movq 24(%rsp),%r12
-.cfi_restore %r12
- movq 32(%rsp),%rbx
-.cfi_restore %rbx
- movq 40(%rsp),%rbp
-.cfi_restore %rbp
- leaq 48(%rsp),%rsp
-.cfi_adjust_cfa_offset -48
-.Lord_sqrx_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ecp_nistz256_ord_sqr_montx,.-ecp_nistz256_ord_sqr_montx
-
@@ -3876,10 +3412,10 @@ ecp_nistz256_ord_sqr_montx:
.type ecp_nistz256_to_mont,@function
.align 32
ecp_nistz256_to_mont:
- movl $0x80100,%ecx
- andl OPENSSL_ia32cap_P+8(%rip),%ecx
+.cfi_startproc
leaq .LRR(%rip),%rdx
jmp .Lmul_mont
+.cfi_endproc
.size ecp_nistz256_to_mont,.-ecp_nistz256_to_mont
@@ -3893,8 +3429,6 @@ ecp_nistz256_to_mont:
.align 32
ecp_nistz256_mul_mont:
.cfi_startproc
- movl $0x80100,%ecx
- andl OPENSSL_ia32cap_P+8(%rip),%ecx
.Lmul_mont:
pushq %rbp
.cfi_adjust_cfa_offset 8
@@ -3915,8 +3449,6 @@ ecp_nistz256_mul_mont:
.cfi_adjust_cfa_offset 8
.cfi_offset %r15,-56
.Lmul_body:
- cmpl $0x80100,%ecx
- je .Lmul_montx
movq %rdx,%rbx
movq 0(%rdx),%rax
movq 0(%rsi),%r9
@@ -3925,19 +3457,6 @@ ecp_nistz256_mul_mont:
movq 24(%rsi),%r12
call __ecp_nistz256_mul_montq
- jmp .Lmul_mont_done
-
-.align 32
-.Lmul_montx:
- movq %rdx,%rbx
- movq 0(%rdx),%rdx
- movq 0(%rsi),%r9
- movq 8(%rsi),%r10
- movq 16(%rsi),%r11
- movq 24(%rsi),%r12
- leaq -128(%rsi),%rsi
-
- call __ecp_nistz256_mul_montx
.Lmul_mont_done:
movq 0(%rsp),%r15
.cfi_restore %r15
@@ -4188,8 +3707,6 @@ __ecp_nistz256_mul_montq:
.align 32
ecp_nistz256_sqr_mont:
.cfi_startproc
- movl $0x80100,%ecx
- andl OPENSSL_ia32cap_P+8(%rip),%ecx
pushq %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-16
@@ -4209,25 +3726,12 @@ ecp_nistz256_sqr_mont:
.cfi_adjust_cfa_offset 8
.cfi_offset %r15,-56
.Lsqr_body:
- cmpl $0x80100,%ecx
- je .Lsqr_montx
movq 0(%rsi),%rax
movq 8(%rsi),%r14
movq 16(%rsi),%r15
movq 24(%rsi),%r8
call __ecp_nistz256_sqr_montq
- jmp .Lsqr_mont_done
-
-.align 32
-.Lsqr_montx:
- movq 0(%rsi),%rdx
- movq 8(%rsi),%r14
- movq 16(%rsi),%r15
- movq 24(%rsi),%r8
- leaq -128(%rsi),%rsi
-
- call __ecp_nistz256_sqr_montx
.Lsqr_mont_done:
movq 0(%rsp),%r15
.cfi_restore %r15
@@ -4411,304 +3915,6 @@ __ecp_nistz256_sqr_montq:
.byte 0xf3,0xc3
.cfi_endproc
.size __ecp_nistz256_sqr_montq,.-__ecp_nistz256_sqr_montq
-.type __ecp_nistz256_mul_montx,@function
-.align 32
-__ecp_nistz256_mul_montx:
-.cfi_startproc
-
-
- mulxq %r9,%r8,%r9
- mulxq %r10,%rcx,%r10
- movq $32,%r14
- xorq %r13,%r13
- mulxq %r11,%rbp,%r11
- movq .Lpoly+24(%rip),%r15
- adcq %rcx,%r9
- mulxq %r12,%rcx,%r12
- movq %r8,%rdx
- adcq %rbp,%r10
- shlxq %r14,%r8,%rbp
- adcq %rcx,%r11
- shrxq %r14,%r8,%rcx
- adcq $0,%r12
-
-
-
- addq %rbp,%r9
- adcq %rcx,%r10
-
- mulxq %r15,%rcx,%rbp
- movq 8(%rbx),%rdx
- adcq %rcx,%r11
- adcq %rbp,%r12
- adcq $0,%r13
- xorq %r8,%r8
-
-
-
- mulxq 0+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r9
- adoxq %rbp,%r10
-
- mulxq 8+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r10
- adoxq %rbp,%r11
-
- mulxq 16+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq 24+128(%rsi),%rcx,%rbp
- movq %r9,%rdx
- adcxq %rcx,%r12
- shlxq %r14,%r9,%rcx
- adoxq %rbp,%r13
- shrxq %r14,%r9,%rbp
-
- adcxq %r8,%r13
- adoxq %r8,%r8
- adcq $0,%r8
-
-
-
- addq %rcx,%r10
- adcq %rbp,%r11
-
- mulxq %r15,%rcx,%rbp
- movq 16(%rbx),%rdx
- adcq %rcx,%r12
- adcq %rbp,%r13
- adcq $0,%r8
- xorq %r9,%r9
-
-
-
- mulxq 0+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r10
- adoxq %rbp,%r11
-
- mulxq 8+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq 16+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r12
- adoxq %rbp,%r13
-
- mulxq 24+128(%rsi),%rcx,%rbp
- movq %r10,%rdx
- adcxq %rcx,%r13
- shlxq %r14,%r10,%rcx
- adoxq %rbp,%r8
- shrxq %r14,%r10,%rbp
-
- adcxq %r9,%r8
- adoxq %r9,%r9
- adcq $0,%r9
-
-
-
- addq %rcx,%r11
- adcq %rbp,%r12
-
- mulxq %r15,%rcx,%rbp
- movq 24(%rbx),%rdx
- adcq %rcx,%r13
- adcq %rbp,%r8
- adcq $0,%r9
- xorq %r10,%r10
-
-
-
- mulxq 0+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq 8+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r12
- adoxq %rbp,%r13
-
- mulxq 16+128(%rsi),%rcx,%rbp
- adcxq %rcx,%r13
- adoxq %rbp,%r8
-
- mulxq 24+128(%rsi),%rcx,%rbp
- movq %r11,%rdx
- adcxq %rcx,%r8
- shlxq %r14,%r11,%rcx
- adoxq %rbp,%r9
- shrxq %r14,%r11,%rbp
-
- adcxq %r10,%r9
- adoxq %r10,%r10
- adcq $0,%r10
-
-
-
- addq %rcx,%r12
- adcq %rbp,%r13
-
- mulxq %r15,%rcx,%rbp
- movq %r12,%rbx
- movq .Lpoly+8(%rip),%r14
- adcq %rcx,%r8
- movq %r13,%rdx
- adcq %rbp,%r9
- adcq $0,%r10
-
-
-
- xorl %eax,%eax
- movq %r8,%rcx
- sbbq $-1,%r12
- sbbq %r14,%r13
- sbbq $0,%r8
- movq %r9,%rbp
- sbbq %r15,%r9
- sbbq $0,%r10
-
- cmovcq %rbx,%r12
- cmovcq %rdx,%r13
- movq %r12,0(%rdi)
- cmovcq %rcx,%r8
- movq %r13,8(%rdi)
- cmovcq %rbp,%r9
- movq %r8,16(%rdi)
- movq %r9,24(%rdi)
-
- .byte 0xf3,0xc3
-.cfi_endproc
-.size __ecp_nistz256_mul_montx,.-__ecp_nistz256_mul_montx
-
-.type __ecp_nistz256_sqr_montx,@function
-.align 32
-__ecp_nistz256_sqr_montx:
-.cfi_startproc
- mulxq %r14,%r9,%r10
- mulxq %r15,%rcx,%r11
- xorl %eax,%eax
- adcq %rcx,%r10
- mulxq %r8,%rbp,%r12
- movq %r14,%rdx
- adcq %rbp,%r11
- adcq $0,%r12
- xorq %r13,%r13
-
-
- mulxq %r15,%rcx,%rbp
- adcxq %rcx,%r11
- adoxq %rbp,%r12
-
- mulxq %r8,%rcx,%rbp
- movq %r15,%rdx
- adcxq %rcx,%r12
- adoxq %rbp,%r13
- adcq $0,%r13
-
-
- mulxq %r8,%rcx,%r14
- movq 0+128(%rsi),%rdx
- xorq %r15,%r15
- adcxq %r9,%r9
- adoxq %rcx,%r13
- adcxq %r10,%r10
- adoxq %r15,%r14
-
- mulxq %rdx,%r8,%rbp
- movq 8+128(%rsi),%rdx
- adcxq %r11,%r11
- adoxq %rbp,%r9
- adcxq %r12,%r12
- mulxq %rdx,%rcx,%rax
- movq 16+128(%rsi),%rdx
- adcxq %r13,%r13
- adoxq %rcx,%r10
- adcxq %r14,%r14
-.byte 0x67
- mulxq %rdx,%rcx,%rbp
- movq 24+128(%rsi),%rdx
- adoxq %rax,%r11
- adcxq %r15,%r15
- adoxq %rcx,%r12
- movq $32,%rsi
- adoxq %rbp,%r13
-.byte 0x67,0x67
- mulxq %rdx,%rcx,%rax
- movq .Lpoly+24(%rip),%rdx
- adoxq %rcx,%r14
- shlxq %rsi,%r8,%rcx
- adoxq %rax,%r15
- shrxq %rsi,%r8,%rax
- movq %rdx,%rbp
-
-
- addq %rcx,%r9
- adcq %rax,%r10
-
- mulxq %r8,%rcx,%r8
- adcq %rcx,%r11
- shlxq %rsi,%r9,%rcx
- adcq $0,%r8
- shrxq %rsi,%r9,%rax
-
-
- addq %rcx,%r10
- adcq %rax,%r11
-
- mulxq %r9,%rcx,%r9
- adcq %rcx,%r8
- shlxq %rsi,%r10,%rcx
- adcq $0,%r9
- shrxq %rsi,%r10,%rax
-
-
- addq %rcx,%r11
- adcq %rax,%r8
-
- mulxq %r10,%rcx,%r10
- adcq %rcx,%r9
- shlxq %rsi,%r11,%rcx
- adcq $0,%r10
- shrxq %rsi,%r11,%rax
-
-
- addq %rcx,%r8
- adcq %rax,%r9
-
- mulxq %r11,%rcx,%r11
- adcq %rcx,%r10
- adcq $0,%r11
-
- xorq %rdx,%rdx
- addq %r8,%r12
- movq .Lpoly+8(%rip),%rsi
- adcq %r9,%r13
- movq %r12,%r8
- adcq %r10,%r14
- adcq %r11,%r15
- movq %r13,%r9
- adcq $0,%rdx
-
- subq $-1,%r12
- movq %r14,%r10
- sbbq %rsi,%r13
- sbbq $0,%r14
- movq %r15,%r11
- sbbq %rbp,%r15
- sbbq $0,%rdx
-
- cmovcq %r8,%r12
- cmovcq %r9,%r13
- movq %r12,0(%rdi)
- cmovcq %r10,%r14
- movq %r13,8(%rdi)
- cmovcq %r11,%r15
- movq %r14,16(%rdi)
- movq %r15,24(%rdi)
-
- .byte 0xf3,0xc3
-.cfi_endproc
-.size __ecp_nistz256_sqr_montx,.-__ecp_nistz256_sqr_montx
@@ -4823,6 +4029,7 @@ ecp_nistz256_from_mont:
.type ecp_nistz256_scatter_w5,@function
.align 32
ecp_nistz256_scatter_w5:
+.cfi_startproc
leal -3(%rdx,%rdx,2),%edx
movdqa 0(%rsi),%xmm0
shll $5,%edx
@@ -4839,6 +4046,7 @@ ecp_nistz256_scatter_w5:
movdqa %xmm5,80(%rdi,%rdx,1)
.byte 0xf3,0xc3
+.cfi_endproc
.size ecp_nistz256_scatter_w5,.-ecp_nistz256_scatter_w5
@@ -4848,9 +4056,6 @@ ecp_nistz256_scatter_w5:
.align 32
ecp_nistz256_gather_w5:
.cfi_startproc
- movl OPENSSL_ia32cap_P+8(%rip),%eax
- testl $32,%eax
- jnz .Lavx2_gather_w5
movdqa .LOne(%rip),%xmm0
movd %edx,%xmm1
@@ -4912,6 +4117,7 @@ ecp_nistz256_gather_w5:
.type ecp_nistz256_scatter_w7,@function
.align 32
ecp_nistz256_scatter_w7:
+.cfi_startproc
movdqu 0(%rsi),%xmm0
shll $6,%edx
movdqu 16(%rsi),%xmm1
@@ -4923,6 +4129,7 @@ ecp_nistz256_scatter_w7:
movdqa %xmm3,48(%rdi,%rdx,1)
.byte 0xf3,0xc3
+.cfi_endproc
.size ecp_nistz256_scatter_w7,.-ecp_nistz256_scatter_w7
@@ -4932,9 +4139,6 @@ ecp_nistz256_scatter_w7:
.align 32
ecp_nistz256_gather_w7:
.cfi_startproc
- movl OPENSSL_ia32cap_P+8(%rip),%eax
- testl $32,%eax
- jnz .Lavx2_gather_w7
movdqa .LOne(%rip),%xmm8
movd %edx,%xmm1
@@ -4978,148 +4182,14 @@ ecp_nistz256_gather_w7:
.cfi_endproc
.LSEH_end_ecp_nistz256_gather_w7:
.size ecp_nistz256_gather_w7,.-ecp_nistz256_gather_w7
-
-
-.type ecp_nistz256_avx2_gather_w5,@function
-.align 32
-ecp_nistz256_avx2_gather_w5:
-.cfi_startproc
-.Lavx2_gather_w5:
- vzeroupper
- vmovdqa .LTwo(%rip),%ymm0
-
- vpxor %ymm2,%ymm2,%ymm2
- vpxor %ymm3,%ymm3,%ymm3
- vpxor %ymm4,%ymm4,%ymm4
-
- vmovdqa .LOne(%rip),%ymm5
- vmovdqa .LTwo(%rip),%ymm10
-
- vmovd %edx,%xmm1
- vpermd %ymm1,%ymm2,%ymm1
-
- movq $8,%rax
-.Lselect_loop_avx2_w5:
-
- vmovdqa 0(%rsi),%ymm6
- vmovdqa 32(%rsi),%ymm7
- vmovdqa 64(%rsi),%ymm8
-
- vmovdqa 96(%rsi),%ymm11
- vmovdqa 128(%rsi),%ymm12
- vmovdqa 160(%rsi),%ymm13
-
- vpcmpeqd %ymm1,%ymm5,%ymm9
- vpcmpeqd %ymm1,%ymm10,%ymm14
-
- vpaddd %ymm0,%ymm5,%ymm5
- vpaddd %ymm0,%ymm10,%ymm10
- leaq 192(%rsi),%rsi
-
- vpand %ymm9,%ymm6,%ymm6
- vpand %ymm9,%ymm7,%ymm7
- vpand %ymm9,%ymm8,%ymm8
- vpand %ymm14,%ymm11,%ymm11
- vpand %ymm14,%ymm12,%ymm12
- vpand %ymm14,%ymm13,%ymm13
-
- vpxor %ymm6,%ymm2,%ymm2
- vpxor %ymm7,%ymm3,%ymm3
- vpxor %ymm8,%ymm4,%ymm4
- vpxor %ymm11,%ymm2,%ymm2
- vpxor %ymm12,%ymm3,%ymm3
- vpxor %ymm13,%ymm4,%ymm4
-
- decq %rax
- jnz .Lselect_loop_avx2_w5
-
- vmovdqu %ymm2,0(%rdi)
- vmovdqu %ymm3,32(%rdi)
- vmovdqu %ymm4,64(%rdi)
- vzeroupper
- .byte 0xf3,0xc3
-.cfi_endproc
-.LSEH_end_ecp_nistz256_avx2_gather_w5:
-.size ecp_nistz256_avx2_gather_w5,.-ecp_nistz256_avx2_gather_w5
-
-
-
.globl ecp_nistz256_avx2_gather_w7
.type ecp_nistz256_avx2_gather_w7,@function
.align 32
ecp_nistz256_avx2_gather_w7:
.cfi_startproc
-.Lavx2_gather_w7:
- vzeroupper
- vmovdqa .LThree(%rip),%ymm0
-
- vpxor %ymm2,%ymm2,%ymm2
- vpxor %ymm3,%ymm3,%ymm3
-
- vmovdqa .LOne(%rip),%ymm4
- vmovdqa .LTwo(%rip),%ymm8
- vmovdqa .LThree(%rip),%ymm12
-
- vmovd %edx,%xmm1
- vpermd %ymm1,%ymm2,%ymm1
-
-
- movq $21,%rax
-.Lselect_loop_avx2_w7:
-
- vmovdqa 0(%rsi),%ymm5
- vmovdqa 32(%rsi),%ymm6
-
- vmovdqa 64(%rsi),%ymm9
- vmovdqa 96(%rsi),%ymm10
-
- vmovdqa 128(%rsi),%ymm13
- vmovdqa 160(%rsi),%ymm14
-
- vpcmpeqd %ymm1,%ymm4,%ymm7
- vpcmpeqd %ymm1,%ymm8,%ymm11
- vpcmpeqd %ymm1,%ymm12,%ymm15
-
- vpaddd %ymm0,%ymm4,%ymm4
- vpaddd %ymm0,%ymm8,%ymm8
- vpaddd %ymm0,%ymm12,%ymm12
- leaq 192(%rsi),%rsi
-
- vpand %ymm7,%ymm5,%ymm5
- vpand %ymm7,%ymm6,%ymm6
- vpand %ymm11,%ymm9,%ymm9
- vpand %ymm11,%ymm10,%ymm10
- vpand %ymm15,%ymm13,%ymm13
- vpand %ymm15,%ymm14,%ymm14
-
- vpxor %ymm5,%ymm2,%ymm2
- vpxor %ymm6,%ymm3,%ymm3
- vpxor %ymm9,%ymm2,%ymm2
- vpxor %ymm10,%ymm3,%ymm3
- vpxor %ymm13,%ymm2,%ymm2
- vpxor %ymm14,%ymm3,%ymm3
-
- decq %rax
- jnz .Lselect_loop_avx2_w7
-
-
- vmovdqa 0(%rsi),%ymm5
- vmovdqa 32(%rsi),%ymm6
-
- vpcmpeqd %ymm1,%ymm4,%ymm7
-
- vpand %ymm7,%ymm5,%ymm5
- vpand %ymm7,%ymm6,%ymm6
-
- vpxor %ymm5,%ymm2,%ymm2
- vpxor %ymm6,%ymm3,%ymm3
-
- vmovdqu %ymm2,0(%rdi)
- vmovdqu %ymm3,32(%rdi)
- vzeroupper
+.byte 0x0f,0x0b
.byte 0xf3,0xc3
.cfi_endproc
-.LSEH_end_ecp_nistz256_avx2_gather_w7:
.size ecp_nistz256_avx2_gather_w7,.-ecp_nistz256_avx2_gather_w7
.type __ecp_nistz256_add_toq,@function
.align 32
@@ -5255,10 +4325,6 @@ __ecp_nistz256_mul_by_2q:
.align 32
ecp_nistz256_point_double:
.cfi_startproc
- movl $0x80100,%ecx
- andl OPENSSL_ia32cap_P+8(%rip),%ecx
- cmpl $0x80100,%ecx
- je .Lpoint_doublex
pushq %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-16
@@ -5487,10 +4553,6 @@ ecp_nistz256_point_double:
.align 32
ecp_nistz256_point_add:
.cfi_startproc
- movl $0x80100,%ecx
- andl OPENSSL_ia32cap_P+8(%rip),%ecx
- cmpl $0x80100,%ecx
- je .Lpoint_addx
pushq %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-16
@@ -5657,26 +4719,16 @@ ecp_nistz256_point_add:
orq %r8,%r12
orq %r9,%r12
-.byte 0x3e
- jnz .Ladd_proceedq
.byte 102,73,15,126,208
.byte 102,73,15,126,217
- testq %r8,%r8
- jnz .Ladd_proceedq
- testq %r9,%r9
- jz .Ladd_doubleq
-.byte 102,72,15,126,199
- pxor %xmm0,%xmm0
- movdqu %xmm0,0(%rdi)
- movdqu %xmm0,16(%rdi)
- movdqu %xmm0,32(%rdi)
- movdqu %xmm0,48(%rdi)
- movdqu %xmm0,64(%rdi)
- movdqu %xmm0,80(%rdi)
- jmp .Ladd_doneq
+ orq %r8,%r12
+ orq %r9,%r12
+
+
+.byte 0x3e
+ jnz .Ladd_proceedq
-.align 32
.Ladd_doubleq:
.byte 102,72,15,126,206
.byte 102,72,15,126,199
@@ -5915,10 +4967,6 @@ ecp_nistz256_point_add:
.align 32
ecp_nistz256_point_add_affine:
.cfi_startproc
- movl $0x80100,%ecx
- andl OPENSSL_ia32cap_P+8(%rip),%ecx
- cmpl $0x80100,%ecx
- je .Lpoint_add_affinex
pushq %rbp
.cfi_adjust_cfa_offset 8
.cfi_offset %rbp,-16
@@ -6242,1118 +5290,3 @@ ecp_nistz256_point_add_affine:
.byte 0xf3,0xc3
.cfi_endproc
.size ecp_nistz256_point_add_affine,.-ecp_nistz256_point_add_affine
-.type __ecp_nistz256_add_tox,@function
-.align 32
-__ecp_nistz256_add_tox:
-.cfi_startproc
- xorq %r11,%r11
- adcq 0(%rbx),%r12
- adcq 8(%rbx),%r13
- movq %r12,%rax
- adcq 16(%rbx),%r8
- adcq 24(%rbx),%r9
- movq %r13,%rbp
- adcq $0,%r11
-
- xorq %r10,%r10
- sbbq $-1,%r12
- movq %r8,%rcx
- sbbq %r14,%r13
- sbbq $0,%r8
- movq %r9,%r10
- sbbq %r15,%r9
- sbbq $0,%r11
-
- cmovcq %rax,%r12
- cmovcq %rbp,%r13
- movq %r12,0(%rdi)
- cmovcq %rcx,%r8
- movq %r13,8(%rdi)
- cmovcq %r10,%r9
- movq %r8,16(%rdi)
- movq %r9,24(%rdi)
-
- .byte 0xf3,0xc3
-.cfi_endproc
-.size __ecp_nistz256_add_tox,.-__ecp_nistz256_add_tox
-
-.type __ecp_nistz256_sub_fromx,@function
-.align 32
-__ecp_nistz256_sub_fromx:
-.cfi_startproc
- xorq %r11,%r11
- sbbq 0(%rbx),%r12
- sbbq 8(%rbx),%r13
- movq %r12,%rax
- sbbq 16(%rbx),%r8
- sbbq 24(%rbx),%r9
- movq %r13,%rbp
- sbbq $0,%r11
-
- xorq %r10,%r10
- adcq $-1,%r12
- movq %r8,%rcx
- adcq %r14,%r13
- adcq $0,%r8
- movq %r9,%r10
- adcq %r15,%r9
-
- btq $0,%r11
- cmovncq %rax,%r12
- cmovncq %rbp,%r13
- movq %r12,0(%rdi)
- cmovncq %rcx,%r8
- movq %r13,8(%rdi)
- cmovncq %r10,%r9
- movq %r8,16(%rdi)
- movq %r9,24(%rdi)
-
- .byte 0xf3,0xc3
-.cfi_endproc
-.size __ecp_nistz256_sub_fromx,.-__ecp_nistz256_sub_fromx
-
-.type __ecp_nistz256_subx,@function
-.align 32
-__ecp_nistz256_subx:
-.cfi_startproc
- xorq %r11,%r11
- sbbq %r12,%rax
- sbbq %r13,%rbp
- movq %rax,%r12
- sbbq %r8,%rcx
- sbbq %r9,%r10
- movq %rbp,%r13
- sbbq $0,%r11
-
- xorq %r9,%r9
- adcq $-1,%rax
- movq %rcx,%r8
- adcq %r14,%rbp
- adcq $0,%rcx
- movq %r10,%r9
- adcq %r15,%r10
-
- btq $0,%r11
- cmovcq %rax,%r12
- cmovcq %rbp,%r13
- cmovcq %rcx,%r8
- cmovcq %r10,%r9
-
- .byte 0xf3,0xc3
-.cfi_endproc
-.size __ecp_nistz256_subx,.-__ecp_nistz256_subx
-
-.type __ecp_nistz256_mul_by_2x,@function
-.align 32
-__ecp_nistz256_mul_by_2x:
-.cfi_startproc
- xorq %r11,%r11
- adcq %r12,%r12
- adcq %r13,%r13
- movq %r12,%rax
- adcq %r8,%r8
- adcq %r9,%r9
- movq %r13,%rbp
- adcq $0,%r11
-
- xorq %r10,%r10
- sbbq $-1,%r12
- movq %r8,%rcx
- sbbq %r14,%r13
- sbbq $0,%r8
- movq %r9,%r10
- sbbq %r15,%r9
- sbbq $0,%r11
-
- cmovcq %rax,%r12
- cmovcq %rbp,%r13
- movq %r12,0(%rdi)
- cmovcq %rcx,%r8
- movq %r13,8(%rdi)
- cmovcq %r10,%r9
- movq %r8,16(%rdi)
- movq %r9,24(%rdi)
-
- .byte 0xf3,0xc3
-.cfi_endproc
-.size __ecp_nistz256_mul_by_2x,.-__ecp_nistz256_mul_by_2x
-.type ecp_nistz256_point_doublex,@function
-.align 32
-ecp_nistz256_point_doublex:
-.cfi_startproc
-.Lpoint_doublex:
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-16
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
- subq $160+8,%rsp
-.cfi_adjust_cfa_offset 32*5+8
-.Lpoint_doublex_body:
-
-.Lpoint_double_shortcutx:
- movdqu 0(%rsi),%xmm0
- movq %rsi,%rbx
- movdqu 16(%rsi),%xmm1
- movq 32+0(%rsi),%r12
- movq 32+8(%rsi),%r13
- movq 32+16(%rsi),%r8
- movq 32+24(%rsi),%r9
- movq .Lpoly+8(%rip),%r14
- movq .Lpoly+24(%rip),%r15
- movdqa %xmm0,96(%rsp)
- movdqa %xmm1,96+16(%rsp)
- leaq 32(%rdi),%r10
- leaq 64(%rdi),%r11
-.byte 102,72,15,110,199
-.byte 102,73,15,110,202
-.byte 102,73,15,110,211
-
- leaq 0(%rsp),%rdi
- call __ecp_nistz256_mul_by_2x
-
- movq 64+0(%rsi),%rdx
- movq 64+8(%rsi),%r14
- movq 64+16(%rsi),%r15
- movq 64+24(%rsi),%r8
- leaq 64-128(%rsi),%rsi
- leaq 64(%rsp),%rdi
- call __ecp_nistz256_sqr_montx
-
- movq 0+0(%rsp),%rdx
- movq 8+0(%rsp),%r14
- leaq -128+0(%rsp),%rsi
- movq 16+0(%rsp),%r15
- movq 24+0(%rsp),%r8
- leaq 0(%rsp),%rdi
- call __ecp_nistz256_sqr_montx
-
- movq 32(%rbx),%rdx
- movq 64+0(%rbx),%r9
- movq 64+8(%rbx),%r10
- movq 64+16(%rbx),%r11
- movq 64+24(%rbx),%r12
- leaq 64-128(%rbx),%rsi
- leaq 32(%rbx),%rbx
-.byte 102,72,15,126,215
- call __ecp_nistz256_mul_montx
- call __ecp_nistz256_mul_by_2x
-
- movq 96+0(%rsp),%r12
- movq 96+8(%rsp),%r13
- leaq 64(%rsp),%rbx
- movq 96+16(%rsp),%r8
- movq 96+24(%rsp),%r9
- leaq 32(%rsp),%rdi
- call __ecp_nistz256_add_tox
-
- movq 96+0(%rsp),%r12
- movq 96+8(%rsp),%r13
- leaq 64(%rsp),%rbx
- movq 96+16(%rsp),%r8
- movq 96+24(%rsp),%r9
- leaq 64(%rsp),%rdi
- call __ecp_nistz256_sub_fromx
-
- movq 0+0(%rsp),%rdx
- movq 8+0(%rsp),%r14
- leaq -128+0(%rsp),%rsi
- movq 16+0(%rsp),%r15
- movq 24+0(%rsp),%r8
-.byte 102,72,15,126,207
- call __ecp_nistz256_sqr_montx
- xorq %r9,%r9
- movq %r12,%rax
- addq $-1,%r12
- movq %r13,%r10
- adcq %rsi,%r13
- movq %r14,%rcx
- adcq $0,%r14
- movq %r15,%r8
- adcq %rbp,%r15
- adcq $0,%r9
- xorq %rsi,%rsi
- testq $1,%rax
-
- cmovzq %rax,%r12
- cmovzq %r10,%r13
- cmovzq %rcx,%r14
- cmovzq %r8,%r15
- cmovzq %rsi,%r9
-
- movq %r13,%rax
- shrq $1,%r12
- shlq $63,%rax
- movq %r14,%r10
- shrq $1,%r13
- orq %rax,%r12
- shlq $63,%r10
- movq %r15,%rcx
- shrq $1,%r14
- orq %r10,%r13
- shlq $63,%rcx
- movq %r12,0(%rdi)
- shrq $1,%r15
- movq %r13,8(%rdi)
- shlq $63,%r9
- orq %rcx,%r14
- orq %r9,%r15
- movq %r14,16(%rdi)
- movq %r15,24(%rdi)
- movq 64(%rsp),%rdx
- leaq 64(%rsp),%rbx
- movq 0+32(%rsp),%r9
- movq 8+32(%rsp),%r10
- leaq -128+32(%rsp),%rsi
- movq 16+32(%rsp),%r11
- movq 24+32(%rsp),%r12
- leaq 32(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- leaq 128(%rsp),%rdi
- call __ecp_nistz256_mul_by_2x
-
- leaq 32(%rsp),%rbx
- leaq 32(%rsp),%rdi
- call __ecp_nistz256_add_tox
-
- movq 96(%rsp),%rdx
- leaq 96(%rsp),%rbx
- movq 0+0(%rsp),%r9
- movq 8+0(%rsp),%r10
- leaq -128+0(%rsp),%rsi
- movq 16+0(%rsp),%r11
- movq 24+0(%rsp),%r12
- leaq 0(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- leaq 128(%rsp),%rdi
- call __ecp_nistz256_mul_by_2x
-
- movq 0+32(%rsp),%rdx
- movq 8+32(%rsp),%r14
- leaq -128+32(%rsp),%rsi
- movq 16+32(%rsp),%r15
- movq 24+32(%rsp),%r8
-.byte 102,72,15,126,199
- call __ecp_nistz256_sqr_montx
-
- leaq 128(%rsp),%rbx
- movq %r14,%r8
- movq %r15,%r9
- movq %rsi,%r14
- movq %rbp,%r15
- call __ecp_nistz256_sub_fromx
-
- movq 0+0(%rsp),%rax
- movq 0+8(%rsp),%rbp
- movq 0+16(%rsp),%rcx
- movq 0+24(%rsp),%r10
- leaq 0(%rsp),%rdi
- call __ecp_nistz256_subx
-
- movq 32(%rsp),%rdx
- leaq 32(%rsp),%rbx
- movq %r12,%r14
- xorl %ecx,%ecx
- movq %r12,0+0(%rsp)
- movq %r13,%r10
- movq %r13,0+8(%rsp)
- cmovzq %r8,%r11
- movq %r8,0+16(%rsp)
- leaq 0-128(%rsp),%rsi
- cmovzq %r9,%r12
- movq %r9,0+24(%rsp)
- movq %r14,%r9
- leaq 0(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
-.byte 102,72,15,126,203
-.byte 102,72,15,126,207
- call __ecp_nistz256_sub_fromx
-
- leaq 160+56(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbx
-.cfi_restore %rbx
- movq -8(%rsi),%rbp
-.cfi_restore %rbp
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lpoint_doublex_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ecp_nistz256_point_doublex,.-ecp_nistz256_point_doublex
-.type ecp_nistz256_point_addx,@function
-.align 32
-ecp_nistz256_point_addx:
-.cfi_startproc
-.Lpoint_addx:
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-16
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
- subq $576+8,%rsp
-.cfi_adjust_cfa_offset 32*18+8
-.Lpoint_addx_body:
-
- movdqu 0(%rsi),%xmm0
- movdqu 16(%rsi),%xmm1
- movdqu 32(%rsi),%xmm2
- movdqu 48(%rsi),%xmm3
- movdqu 64(%rsi),%xmm4
- movdqu 80(%rsi),%xmm5
- movq %rsi,%rbx
- movq %rdx,%rsi
- movdqa %xmm0,384(%rsp)
- movdqa %xmm1,384+16(%rsp)
- movdqa %xmm2,416(%rsp)
- movdqa %xmm3,416+16(%rsp)
- movdqa %xmm4,448(%rsp)
- movdqa %xmm5,448+16(%rsp)
- por %xmm4,%xmm5
-
- movdqu 0(%rsi),%xmm0
- pshufd $0xb1,%xmm5,%xmm3
- movdqu 16(%rsi),%xmm1
- movdqu 32(%rsi),%xmm2
- por %xmm3,%xmm5
- movdqu 48(%rsi),%xmm3
- movq 64+0(%rsi),%rdx
- movq 64+8(%rsi),%r14
- movq 64+16(%rsi),%r15
- movq 64+24(%rsi),%r8
- movdqa %xmm0,480(%rsp)
- pshufd $0x1e,%xmm5,%xmm4
- movdqa %xmm1,480+16(%rsp)
- movdqu 64(%rsi),%xmm0
- movdqu 80(%rsi),%xmm1
- movdqa %xmm2,512(%rsp)
- movdqa %xmm3,512+16(%rsp)
- por %xmm4,%xmm5
- pxor %xmm4,%xmm4
- por %xmm0,%xmm1
-.byte 102,72,15,110,199
-
- leaq 64-128(%rsi),%rsi
- movq %rdx,544+0(%rsp)
- movq %r14,544+8(%rsp)
- movq %r15,544+16(%rsp)
- movq %r8,544+24(%rsp)
- leaq 96(%rsp),%rdi
- call __ecp_nistz256_sqr_montx
-
- pcmpeqd %xmm4,%xmm5
- pshufd $0xb1,%xmm1,%xmm4
- por %xmm1,%xmm4
- pshufd $0,%xmm5,%xmm5
- pshufd $0x1e,%xmm4,%xmm3
- por %xmm3,%xmm4
- pxor %xmm3,%xmm3
- pcmpeqd %xmm3,%xmm4
- pshufd $0,%xmm4,%xmm4
- movq 64+0(%rbx),%rdx
- movq 64+8(%rbx),%r14
- movq 64+16(%rbx),%r15
- movq 64+24(%rbx),%r8
-.byte 102,72,15,110,203
-
- leaq 64-128(%rbx),%rsi
- leaq 32(%rsp),%rdi
- call __ecp_nistz256_sqr_montx
-
- movq 544(%rsp),%rdx
- leaq 544(%rsp),%rbx
- movq 0+96(%rsp),%r9
- movq 8+96(%rsp),%r10
- leaq -128+96(%rsp),%rsi
- movq 16+96(%rsp),%r11
- movq 24+96(%rsp),%r12
- leaq 224(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 448(%rsp),%rdx
- leaq 448(%rsp),%rbx
- movq 0+32(%rsp),%r9
- movq 8+32(%rsp),%r10
- leaq -128+32(%rsp),%rsi
- movq 16+32(%rsp),%r11
- movq 24+32(%rsp),%r12
- leaq 256(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 416(%rsp),%rdx
- leaq 416(%rsp),%rbx
- movq 0+224(%rsp),%r9
- movq 8+224(%rsp),%r10
- leaq -128+224(%rsp),%rsi
- movq 16+224(%rsp),%r11
- movq 24+224(%rsp),%r12
- leaq 224(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 512(%rsp),%rdx
- leaq 512(%rsp),%rbx
- movq 0+256(%rsp),%r9
- movq 8+256(%rsp),%r10
- leaq -128+256(%rsp),%rsi
- movq 16+256(%rsp),%r11
- movq 24+256(%rsp),%r12
- leaq 256(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- leaq 224(%rsp),%rbx
- leaq 64(%rsp),%rdi
- call __ecp_nistz256_sub_fromx
-
- orq %r13,%r12
- movdqa %xmm4,%xmm2
- orq %r8,%r12
- orq %r9,%r12
- por %xmm5,%xmm2
-.byte 102,73,15,110,220
-
- movq 384(%rsp),%rdx
- leaq 384(%rsp),%rbx
- movq 0+96(%rsp),%r9
- movq 8+96(%rsp),%r10
- leaq -128+96(%rsp),%rsi
- movq 16+96(%rsp),%r11
- movq 24+96(%rsp),%r12
- leaq 160(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 480(%rsp),%rdx
- leaq 480(%rsp),%rbx
- movq 0+32(%rsp),%r9
- movq 8+32(%rsp),%r10
- leaq -128+32(%rsp),%rsi
- movq 16+32(%rsp),%r11
- movq 24+32(%rsp),%r12
- leaq 192(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- leaq 160(%rsp),%rbx
- leaq 0(%rsp),%rdi
- call __ecp_nistz256_sub_fromx
-
- orq %r13,%r12
- orq %r8,%r12
- orq %r9,%r12
-
-.byte 0x3e
- jnz .Ladd_proceedx
-.byte 102,73,15,126,208
-.byte 102,73,15,126,217
- testq %r8,%r8
- jnz .Ladd_proceedx
- testq %r9,%r9
- jz .Ladd_doublex
-
-.byte 102,72,15,126,199
- pxor %xmm0,%xmm0
- movdqu %xmm0,0(%rdi)
- movdqu %xmm0,16(%rdi)
- movdqu %xmm0,32(%rdi)
- movdqu %xmm0,48(%rdi)
- movdqu %xmm0,64(%rdi)
- movdqu %xmm0,80(%rdi)
- jmp .Ladd_donex
-
-.align 32
-.Ladd_doublex:
-.byte 102,72,15,126,206
-.byte 102,72,15,126,199
- addq $416,%rsp
-.cfi_adjust_cfa_offset -416
- jmp .Lpoint_double_shortcutx
-.cfi_adjust_cfa_offset 416
-
-.align 32
-.Ladd_proceedx:
- movq 0+64(%rsp),%rdx
- movq 8+64(%rsp),%r14
- leaq -128+64(%rsp),%rsi
- movq 16+64(%rsp),%r15
- movq 24+64(%rsp),%r8
- leaq 96(%rsp),%rdi
- call __ecp_nistz256_sqr_montx
-
- movq 448(%rsp),%rdx
- leaq 448(%rsp),%rbx
- movq 0+0(%rsp),%r9
- movq 8+0(%rsp),%r10
- leaq -128+0(%rsp),%rsi
- movq 16+0(%rsp),%r11
- movq 24+0(%rsp),%r12
- leaq 352(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 0+0(%rsp),%rdx
- movq 8+0(%rsp),%r14
- leaq -128+0(%rsp),%rsi
- movq 16+0(%rsp),%r15
- movq 24+0(%rsp),%r8
- leaq 32(%rsp),%rdi
- call __ecp_nistz256_sqr_montx
-
- movq 544(%rsp),%rdx
- leaq 544(%rsp),%rbx
- movq 0+352(%rsp),%r9
- movq 8+352(%rsp),%r10
- leaq -128+352(%rsp),%rsi
- movq 16+352(%rsp),%r11
- movq 24+352(%rsp),%r12
- leaq 352(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 0(%rsp),%rdx
- leaq 0(%rsp),%rbx
- movq 0+32(%rsp),%r9
- movq 8+32(%rsp),%r10
- leaq -128+32(%rsp),%rsi
- movq 16+32(%rsp),%r11
- movq 24+32(%rsp),%r12
- leaq 128(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 160(%rsp),%rdx
- leaq 160(%rsp),%rbx
- movq 0+32(%rsp),%r9
- movq 8+32(%rsp),%r10
- leaq -128+32(%rsp),%rsi
- movq 16+32(%rsp),%r11
- movq 24+32(%rsp),%r12
- leaq 192(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
-
-
-
- xorq %r11,%r11
- addq %r12,%r12
- leaq 96(%rsp),%rsi
- adcq %r13,%r13
- movq %r12,%rax
- adcq %r8,%r8
- adcq %r9,%r9
- movq %r13,%rbp
- adcq $0,%r11
-
- subq $-1,%r12
- movq %r8,%rcx
- sbbq %r14,%r13
- sbbq $0,%r8
- movq %r9,%r10
- sbbq %r15,%r9
- sbbq $0,%r11
-
- cmovcq %rax,%r12
- movq 0(%rsi),%rax
- cmovcq %rbp,%r13
- movq 8(%rsi),%rbp
- cmovcq %rcx,%r8
- movq 16(%rsi),%rcx
- cmovcq %r10,%r9
- movq 24(%rsi),%r10
-
- call __ecp_nistz256_subx
-
- leaq 128(%rsp),%rbx
- leaq 288(%rsp),%rdi
- call __ecp_nistz256_sub_fromx
-
- movq 192+0(%rsp),%rax
- movq 192+8(%rsp),%rbp
- movq 192+16(%rsp),%rcx
- movq 192+24(%rsp),%r10
- leaq 320(%rsp),%rdi
-
- call __ecp_nistz256_subx
-
- movq %r12,0(%rdi)
- movq %r13,8(%rdi)
- movq %r8,16(%rdi)
- movq %r9,24(%rdi)
- movq 128(%rsp),%rdx
- leaq 128(%rsp),%rbx
- movq 0+224(%rsp),%r9
- movq 8+224(%rsp),%r10
- leaq -128+224(%rsp),%rsi
- movq 16+224(%rsp),%r11
- movq 24+224(%rsp),%r12
- leaq 256(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 320(%rsp),%rdx
- leaq 320(%rsp),%rbx
- movq 0+64(%rsp),%r9
- movq 8+64(%rsp),%r10
- leaq -128+64(%rsp),%rsi
- movq 16+64(%rsp),%r11
- movq 24+64(%rsp),%r12
- leaq 320(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- leaq 256(%rsp),%rbx
- leaq 320(%rsp),%rdi
- call __ecp_nistz256_sub_fromx
-
-.byte 102,72,15,126,199
-
- movdqa %xmm5,%xmm0
- movdqa %xmm5,%xmm1
- pandn 352(%rsp),%xmm0
- movdqa %xmm5,%xmm2
- pandn 352+16(%rsp),%xmm1
- movdqa %xmm5,%xmm3
- pand 544(%rsp),%xmm2
- pand 544+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
-
- movdqa %xmm4,%xmm0
- movdqa %xmm4,%xmm1
- pandn %xmm2,%xmm0
- movdqa %xmm4,%xmm2
- pandn %xmm3,%xmm1
- movdqa %xmm4,%xmm3
- pand 448(%rsp),%xmm2
- pand 448+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
- movdqu %xmm2,64(%rdi)
- movdqu %xmm3,80(%rdi)
-
- movdqa %xmm5,%xmm0
- movdqa %xmm5,%xmm1
- pandn 288(%rsp),%xmm0
- movdqa %xmm5,%xmm2
- pandn 288+16(%rsp),%xmm1
- movdqa %xmm5,%xmm3
- pand 480(%rsp),%xmm2
- pand 480+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
-
- movdqa %xmm4,%xmm0
- movdqa %xmm4,%xmm1
- pandn %xmm2,%xmm0
- movdqa %xmm4,%xmm2
- pandn %xmm3,%xmm1
- movdqa %xmm4,%xmm3
- pand 384(%rsp),%xmm2
- pand 384+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
- movdqu %xmm2,0(%rdi)
- movdqu %xmm3,16(%rdi)
-
- movdqa %xmm5,%xmm0
- movdqa %xmm5,%xmm1
- pandn 320(%rsp),%xmm0
- movdqa %xmm5,%xmm2
- pandn 320+16(%rsp),%xmm1
- movdqa %xmm5,%xmm3
- pand 512(%rsp),%xmm2
- pand 512+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
-
- movdqa %xmm4,%xmm0
- movdqa %xmm4,%xmm1
- pandn %xmm2,%xmm0
- movdqa %xmm4,%xmm2
- pandn %xmm3,%xmm1
- movdqa %xmm4,%xmm3
- pand 416(%rsp),%xmm2
- pand 416+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
- movdqu %xmm2,32(%rdi)
- movdqu %xmm3,48(%rdi)
-
-.Ladd_donex:
- leaq 576+56(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbx
-.cfi_restore %rbx
- movq -8(%rsi),%rbp
-.cfi_restore %rbp
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lpoint_addx_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ecp_nistz256_point_addx,.-ecp_nistz256_point_addx
-.type ecp_nistz256_point_add_affinex,@function
-.align 32
-ecp_nistz256_point_add_affinex:
-.cfi_startproc
-.Lpoint_add_affinex:
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-16
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
- subq $480+8,%rsp
-.cfi_adjust_cfa_offset 32*15+8
-.Ladd_affinex_body:
-
- movdqu 0(%rsi),%xmm0
- movq %rdx,%rbx
- movdqu 16(%rsi),%xmm1
- movdqu 32(%rsi),%xmm2
- movdqu 48(%rsi),%xmm3
- movdqu 64(%rsi),%xmm4
- movdqu 80(%rsi),%xmm5
- movq 64+0(%rsi),%rdx
- movq 64+8(%rsi),%r14
- movq 64+16(%rsi),%r15
- movq 64+24(%rsi),%r8
- movdqa %xmm0,320(%rsp)
- movdqa %xmm1,320+16(%rsp)
- movdqa %xmm2,352(%rsp)
- movdqa %xmm3,352+16(%rsp)
- movdqa %xmm4,384(%rsp)
- movdqa %xmm5,384+16(%rsp)
- por %xmm4,%xmm5
-
- movdqu 0(%rbx),%xmm0
- pshufd $0xb1,%xmm5,%xmm3
- movdqu 16(%rbx),%xmm1
- movdqu 32(%rbx),%xmm2
- por %xmm3,%xmm5
- movdqu 48(%rbx),%xmm3
- movdqa %xmm0,416(%rsp)
- pshufd $0x1e,%xmm5,%xmm4
- movdqa %xmm1,416+16(%rsp)
- por %xmm0,%xmm1
-.byte 102,72,15,110,199
- movdqa %xmm2,448(%rsp)
- movdqa %xmm3,448+16(%rsp)
- por %xmm2,%xmm3
- por %xmm4,%xmm5
- pxor %xmm4,%xmm4
- por %xmm1,%xmm3
-
- leaq 64-128(%rsi),%rsi
- leaq 32(%rsp),%rdi
- call __ecp_nistz256_sqr_montx
-
- pcmpeqd %xmm4,%xmm5
- pshufd $0xb1,%xmm3,%xmm4
- movq 0(%rbx),%rdx
-
- movq %r12,%r9
- por %xmm3,%xmm4
- pshufd $0,%xmm5,%xmm5
- pshufd $0x1e,%xmm4,%xmm3
- movq %r13,%r10
- por %xmm3,%xmm4
- pxor %xmm3,%xmm3
- movq %r14,%r11
- pcmpeqd %xmm3,%xmm4
- pshufd $0,%xmm4,%xmm4
-
- leaq 32-128(%rsp),%rsi
- movq %r15,%r12
- leaq 0(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- leaq 320(%rsp),%rbx
- leaq 64(%rsp),%rdi
- call __ecp_nistz256_sub_fromx
-
- movq 384(%rsp),%rdx
- leaq 384(%rsp),%rbx
- movq 0+32(%rsp),%r9
- movq 8+32(%rsp),%r10
- leaq -128+32(%rsp),%rsi
- movq 16+32(%rsp),%r11
- movq 24+32(%rsp),%r12
- leaq 32(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 384(%rsp),%rdx
- leaq 384(%rsp),%rbx
- movq 0+64(%rsp),%r9
- movq 8+64(%rsp),%r10
- leaq -128+64(%rsp),%rsi
- movq 16+64(%rsp),%r11
- movq 24+64(%rsp),%r12
- leaq 288(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 448(%rsp),%rdx
- leaq 448(%rsp),%rbx
- movq 0+32(%rsp),%r9
- movq 8+32(%rsp),%r10
- leaq -128+32(%rsp),%rsi
- movq 16+32(%rsp),%r11
- movq 24+32(%rsp),%r12
- leaq 32(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- leaq 352(%rsp),%rbx
- leaq 96(%rsp),%rdi
- call __ecp_nistz256_sub_fromx
-
- movq 0+64(%rsp),%rdx
- movq 8+64(%rsp),%r14
- leaq -128+64(%rsp),%rsi
- movq 16+64(%rsp),%r15
- movq 24+64(%rsp),%r8
- leaq 128(%rsp),%rdi
- call __ecp_nistz256_sqr_montx
-
- movq 0+96(%rsp),%rdx
- movq 8+96(%rsp),%r14
- leaq -128+96(%rsp),%rsi
- movq 16+96(%rsp),%r15
- movq 24+96(%rsp),%r8
- leaq 192(%rsp),%rdi
- call __ecp_nistz256_sqr_montx
-
- movq 128(%rsp),%rdx
- leaq 128(%rsp),%rbx
- movq 0+64(%rsp),%r9
- movq 8+64(%rsp),%r10
- leaq -128+64(%rsp),%rsi
- movq 16+64(%rsp),%r11
- movq 24+64(%rsp),%r12
- leaq 160(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 320(%rsp),%rdx
- leaq 320(%rsp),%rbx
- movq 0+128(%rsp),%r9
- movq 8+128(%rsp),%r10
- leaq -128+128(%rsp),%rsi
- movq 16+128(%rsp),%r11
- movq 24+128(%rsp),%r12
- leaq 0(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
-
-
-
- xorq %r11,%r11
- addq %r12,%r12
- leaq 192(%rsp),%rsi
- adcq %r13,%r13
- movq %r12,%rax
- adcq %r8,%r8
- adcq %r9,%r9
- movq %r13,%rbp
- adcq $0,%r11
-
- subq $-1,%r12
- movq %r8,%rcx
- sbbq %r14,%r13
- sbbq $0,%r8
- movq %r9,%r10
- sbbq %r15,%r9
- sbbq $0,%r11
-
- cmovcq %rax,%r12
- movq 0(%rsi),%rax
- cmovcq %rbp,%r13
- movq 8(%rsi),%rbp
- cmovcq %rcx,%r8
- movq 16(%rsi),%rcx
- cmovcq %r10,%r9
- movq 24(%rsi),%r10
-
- call __ecp_nistz256_subx
-
- leaq 160(%rsp),%rbx
- leaq 224(%rsp),%rdi
- call __ecp_nistz256_sub_fromx
-
- movq 0+0(%rsp),%rax
- movq 0+8(%rsp),%rbp
- movq 0+16(%rsp),%rcx
- movq 0+24(%rsp),%r10
- leaq 64(%rsp),%rdi
-
- call __ecp_nistz256_subx
-
- movq %r12,0(%rdi)
- movq %r13,8(%rdi)
- movq %r8,16(%rdi)
- movq %r9,24(%rdi)
- movq 352(%rsp),%rdx
- leaq 352(%rsp),%rbx
- movq 0+160(%rsp),%r9
- movq 8+160(%rsp),%r10
- leaq -128+160(%rsp),%rsi
- movq 16+160(%rsp),%r11
- movq 24+160(%rsp),%r12
- leaq 32(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- movq 96(%rsp),%rdx
- leaq 96(%rsp),%rbx
- movq 0+64(%rsp),%r9
- movq 8+64(%rsp),%r10
- leaq -128+64(%rsp),%rsi
- movq 16+64(%rsp),%r11
- movq 24+64(%rsp),%r12
- leaq 64(%rsp),%rdi
- call __ecp_nistz256_mul_montx
-
- leaq 32(%rsp),%rbx
- leaq 256(%rsp),%rdi
- call __ecp_nistz256_sub_fromx
-
-.byte 102,72,15,126,199
-
- movdqa %xmm5,%xmm0
- movdqa %xmm5,%xmm1
- pandn 288(%rsp),%xmm0
- movdqa %xmm5,%xmm2
- pandn 288+16(%rsp),%xmm1
- movdqa %xmm5,%xmm3
- pand .LONE_mont(%rip),%xmm2
- pand .LONE_mont+16(%rip),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
-
- movdqa %xmm4,%xmm0
- movdqa %xmm4,%xmm1
- pandn %xmm2,%xmm0
- movdqa %xmm4,%xmm2
- pandn %xmm3,%xmm1
- movdqa %xmm4,%xmm3
- pand 384(%rsp),%xmm2
- pand 384+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
- movdqu %xmm2,64(%rdi)
- movdqu %xmm3,80(%rdi)
-
- movdqa %xmm5,%xmm0
- movdqa %xmm5,%xmm1
- pandn 224(%rsp),%xmm0
- movdqa %xmm5,%xmm2
- pandn 224+16(%rsp),%xmm1
- movdqa %xmm5,%xmm3
- pand 416(%rsp),%xmm2
- pand 416+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
-
- movdqa %xmm4,%xmm0
- movdqa %xmm4,%xmm1
- pandn %xmm2,%xmm0
- movdqa %xmm4,%xmm2
- pandn %xmm3,%xmm1
- movdqa %xmm4,%xmm3
- pand 320(%rsp),%xmm2
- pand 320+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
- movdqu %xmm2,0(%rdi)
- movdqu %xmm3,16(%rdi)
-
- movdqa %xmm5,%xmm0
- movdqa %xmm5,%xmm1
- pandn 256(%rsp),%xmm0
- movdqa %xmm5,%xmm2
- pandn 256+16(%rsp),%xmm1
- movdqa %xmm5,%xmm3
- pand 448(%rsp),%xmm2
- pand 448+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
-
- movdqa %xmm4,%xmm0
- movdqa %xmm4,%xmm1
- pandn %xmm2,%xmm0
- movdqa %xmm4,%xmm2
- pandn %xmm3,%xmm1
- movdqa %xmm4,%xmm3
- pand 352(%rsp),%xmm2
- pand 352+16(%rsp),%xmm3
- por %xmm0,%xmm2
- por %xmm1,%xmm3
- movdqu %xmm2,32(%rdi)
- movdqu %xmm3,48(%rdi)
-
- leaq 480+56(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbx
-.cfi_restore %rbx
- movq -8(%rsi),%rbp
-.cfi_restore %rbp
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Ladd_affinex_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size ecp_nistz256_point_add_affinex,.-ecp_nistz256_point_add_affinex
diff --git a/secure/lib/libcrypto/amd64/ghash-x86_64.S b/secure/lib/libcrypto/amd64/ghash-x86_64.S
index 078353528d5f5..55ad7db1f2406 100644
--- a/secure/lib/libcrypto/amd64/ghash-x86_64.S
+++ b/secure/lib/libcrypto/amd64/ghash-x86_64.S
@@ -1304,108 +1304,7 @@ gcm_ghash_clmul:
.align 32
gcm_init_avx:
.cfi_startproc
- vzeroupper
-
- vmovdqu (%rsi),%xmm2
- vpshufd $78,%xmm2,%xmm2
-
-
- vpshufd $255,%xmm2,%xmm4
- vpsrlq $63,%xmm2,%xmm3
- vpsllq $1,%xmm2,%xmm2
- vpxor %xmm5,%xmm5,%xmm5
- vpcmpgtd %xmm4,%xmm5,%xmm5
- vpslldq $8,%xmm3,%xmm3
- vpor %xmm3,%xmm2,%xmm2
-
-
- vpand .L0x1c2_polynomial(%rip),%xmm5,%xmm5
- vpxor %xmm5,%xmm2,%xmm2
-
- vpunpckhqdq %xmm2,%xmm2,%xmm6
- vmovdqa %xmm2,%xmm0
- vpxor %xmm2,%xmm6,%xmm6
- movq $4,%r10
- jmp .Linit_start_avx
-.align 32
-.Linit_loop_avx:
- vpalignr $8,%xmm3,%xmm4,%xmm5
- vmovdqu %xmm5,-16(%rdi)
- vpunpckhqdq %xmm0,%xmm0,%xmm3
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x11,%xmm2,%xmm0,%xmm1
- vpclmulqdq $0x00,%xmm2,%xmm0,%xmm0
- vpclmulqdq $0x00,%xmm6,%xmm3,%xmm3
- vpxor %xmm0,%xmm1,%xmm4
- vpxor %xmm4,%xmm3,%xmm3
-
- vpslldq $8,%xmm3,%xmm4
- vpsrldq $8,%xmm3,%xmm3
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm3,%xmm1,%xmm1
- vpsllq $57,%xmm0,%xmm3
- vpsllq $62,%xmm0,%xmm4
- vpxor %xmm3,%xmm4,%xmm4
- vpsllq $63,%xmm0,%xmm3
- vpxor %xmm3,%xmm4,%xmm4
- vpslldq $8,%xmm4,%xmm3
- vpsrldq $8,%xmm4,%xmm4
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm4,%xmm1,%xmm1
-
- vpsrlq $1,%xmm0,%xmm4
- vpxor %xmm0,%xmm1,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpsrlq $5,%xmm4,%xmm4
- vpxor %xmm4,%xmm0,%xmm0
- vpsrlq $1,%xmm0,%xmm0
- vpxor %xmm1,%xmm0,%xmm0
-.Linit_start_avx:
- vmovdqa %xmm0,%xmm5
- vpunpckhqdq %xmm0,%xmm0,%xmm3
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x11,%xmm2,%xmm0,%xmm1
- vpclmulqdq $0x00,%xmm2,%xmm0,%xmm0
- vpclmulqdq $0x00,%xmm6,%xmm3,%xmm3
- vpxor %xmm0,%xmm1,%xmm4
- vpxor %xmm4,%xmm3,%xmm3
-
- vpslldq $8,%xmm3,%xmm4
- vpsrldq $8,%xmm3,%xmm3
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm3,%xmm1,%xmm1
- vpsllq $57,%xmm0,%xmm3
- vpsllq $62,%xmm0,%xmm4
- vpxor %xmm3,%xmm4,%xmm4
- vpsllq $63,%xmm0,%xmm3
- vpxor %xmm3,%xmm4,%xmm4
- vpslldq $8,%xmm4,%xmm3
- vpsrldq $8,%xmm4,%xmm4
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm4,%xmm1,%xmm1
-
- vpsrlq $1,%xmm0,%xmm4
- vpxor %xmm0,%xmm1,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpsrlq $5,%xmm4,%xmm4
- vpxor %xmm4,%xmm0,%xmm0
- vpsrlq $1,%xmm0,%xmm0
- vpxor %xmm1,%xmm0,%xmm0
- vpshufd $78,%xmm5,%xmm3
- vpshufd $78,%xmm0,%xmm4
- vpxor %xmm5,%xmm3,%xmm3
- vmovdqu %xmm5,0(%rdi)
- vpxor %xmm0,%xmm4,%xmm4
- vmovdqu %xmm0,16(%rdi)
- leaq 48(%rdi),%rdi
- subq $1,%r10
- jnz .Linit_loop_avx
-
- vpalignr $8,%xmm4,%xmm3,%xmm5
- vmovdqu %xmm5,-16(%rdi)
-
- vzeroupper
- .byte 0xf3,0xc3
+ jmp .L_init_clmul
.cfi_endproc
.size gcm_init_avx,.-gcm_init_avx
.globl gcm_gmult_avx
@@ -1421,377 +1320,7 @@ gcm_gmult_avx:
.align 32
gcm_ghash_avx:
.cfi_startproc
- vzeroupper
-
- vmovdqu (%rdi),%xmm10
- leaq .L0x1c2_polynomial(%rip),%r10
- leaq 64(%rsi),%rsi
- vmovdqu .Lbswap_mask(%rip),%xmm13
- vpshufb %xmm13,%xmm10,%xmm10
- cmpq $0x80,%rcx
- jb .Lshort_avx
- subq $0x80,%rcx
-
- vmovdqu 112(%rdx),%xmm14
- vmovdqu 0-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm14
- vmovdqu 32-64(%rsi),%xmm7
-
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vmovdqu 96(%rdx),%xmm15
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpxor %xmm14,%xmm9,%xmm9
- vpshufb %xmm13,%xmm15,%xmm15
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 16-64(%rsi),%xmm6
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vmovdqu 80(%rdx),%xmm14
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vpxor %xmm15,%xmm8,%xmm8
-
- vpshufb %xmm13,%xmm14,%xmm14
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vmovdqu 48-64(%rsi),%xmm6
- vpxor %xmm14,%xmm9,%xmm9
- vmovdqu 64(%rdx),%xmm15
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 80-64(%rsi),%xmm7
-
- vpshufb %xmm13,%xmm15,%xmm15
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpxor %xmm1,%xmm4,%xmm4
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 64-64(%rsi),%xmm6
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vpxor %xmm15,%xmm8,%xmm8
-
- vmovdqu 48(%rdx),%xmm14
- vpxor %xmm3,%xmm0,%xmm0
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpxor %xmm4,%xmm1,%xmm1
- vpshufb %xmm13,%xmm14,%xmm14
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vmovdqu 96-64(%rsi),%xmm6
- vpxor %xmm5,%xmm2,%xmm2
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 128-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
-
- vmovdqu 32(%rdx),%xmm15
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpxor %xmm1,%xmm4,%xmm4
- vpshufb %xmm13,%xmm15,%xmm15
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 112-64(%rsi),%xmm6
- vpxor %xmm2,%xmm5,%xmm5
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vpxor %xmm15,%xmm8,%xmm8
-
- vmovdqu 16(%rdx),%xmm14
- vpxor %xmm3,%xmm0,%xmm0
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpxor %xmm4,%xmm1,%xmm1
- vpshufb %xmm13,%xmm14,%xmm14
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vmovdqu 144-64(%rsi),%xmm6
- vpxor %xmm5,%xmm2,%xmm2
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 176-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
-
- vmovdqu (%rdx),%xmm15
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpxor %xmm1,%xmm4,%xmm4
- vpshufb %xmm13,%xmm15,%xmm15
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 160-64(%rsi),%xmm6
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x10,%xmm7,%xmm9,%xmm2
-
- leaq 128(%rdx),%rdx
- cmpq $0x80,%rcx
- jb .Ltail_avx
-
- vpxor %xmm10,%xmm15,%xmm15
- subq $0x80,%rcx
- jmp .Loop8x_avx
-
-.align 32
-.Loop8x_avx:
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vmovdqu 112(%rdx),%xmm14
- vpxor %xmm0,%xmm3,%xmm3
- vpxor %xmm15,%xmm8,%xmm8
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm10
- vpshufb %xmm13,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm11
- vmovdqu 0-64(%rsi),%xmm6
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm12
- vmovdqu 32-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
-
- vmovdqu 96(%rdx),%xmm15
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpxor %xmm3,%xmm10,%xmm10
- vpshufb %xmm13,%xmm15,%xmm15
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vxorps %xmm4,%xmm11,%xmm11
- vmovdqu 16-64(%rsi),%xmm6
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vpxor %xmm5,%xmm12,%xmm12
- vxorps %xmm15,%xmm8,%xmm8
-
- vmovdqu 80(%rdx),%xmm14
- vpxor %xmm10,%xmm12,%xmm12
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpxor %xmm11,%xmm12,%xmm12
- vpslldq $8,%xmm12,%xmm9
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vpsrldq $8,%xmm12,%xmm12
- vpxor %xmm9,%xmm10,%xmm10
- vmovdqu 48-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm14
- vxorps %xmm12,%xmm11,%xmm11
- vpxor %xmm1,%xmm4,%xmm4
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 80-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
- vpxor %xmm2,%xmm5,%xmm5
-
- vmovdqu 64(%rdx),%xmm15
- vpalignr $8,%xmm10,%xmm10,%xmm12
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpshufb %xmm13,%xmm15,%xmm15
- vpxor %xmm3,%xmm0,%xmm0
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 64-64(%rsi),%xmm6
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm4,%xmm1,%xmm1
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vxorps %xmm15,%xmm8,%xmm8
- vpxor %xmm5,%xmm2,%xmm2
-
- vmovdqu 48(%rdx),%xmm14
- vpclmulqdq $0x10,(%r10),%xmm10,%xmm10
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpshufb %xmm13,%xmm14,%xmm14
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vmovdqu 96-64(%rsi),%xmm6
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 128-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
- vpxor %xmm2,%xmm5,%xmm5
-
- vmovdqu 32(%rdx),%xmm15
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpshufb %xmm13,%xmm15,%xmm15
- vpxor %xmm3,%xmm0,%xmm0
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 112-64(%rsi),%xmm6
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm4,%xmm1,%xmm1
- vpclmulqdq $0x00,%xmm7,%xmm9,%xmm2
- vpxor %xmm15,%xmm8,%xmm8
- vpxor %xmm5,%xmm2,%xmm2
- vxorps %xmm12,%xmm10,%xmm10
-
- vmovdqu 16(%rdx),%xmm14
- vpalignr $8,%xmm10,%xmm10,%xmm12
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm3
- vpshufb %xmm13,%xmm14,%xmm14
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm4
- vmovdqu 144-64(%rsi),%xmm6
- vpclmulqdq $0x10,(%r10),%xmm10,%xmm10
- vxorps %xmm11,%xmm12,%xmm12
- vpunpckhqdq %xmm14,%xmm14,%xmm9
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x10,%xmm7,%xmm8,%xmm5
- vmovdqu 176-64(%rsi),%xmm7
- vpxor %xmm14,%xmm9,%xmm9
- vpxor %xmm2,%xmm5,%xmm5
-
- vmovdqu (%rdx),%xmm15
- vpclmulqdq $0x00,%xmm6,%xmm14,%xmm0
- vpshufb %xmm13,%xmm15,%xmm15
- vpclmulqdq $0x11,%xmm6,%xmm14,%xmm1
- vmovdqu 160-64(%rsi),%xmm6
- vpxor %xmm12,%xmm15,%xmm15
- vpclmulqdq $0x10,%xmm7,%xmm9,%xmm2
- vpxor %xmm10,%xmm15,%xmm15
-
- leaq 128(%rdx),%rdx
- subq $0x80,%rcx
- jnc .Loop8x_avx
-
- addq $0x80,%rcx
- jmp .Ltail_no_xor_avx
-
-.align 32
-.Lshort_avx:
- vmovdqu -16(%rdx,%rcx,1),%xmm14
- leaq (%rdx,%rcx,1),%rdx
- vmovdqu 0-64(%rsi),%xmm6
- vmovdqu 32-64(%rsi),%xmm7
- vpshufb %xmm13,%xmm14,%xmm15
-
- vmovdqa %xmm0,%xmm3
- vmovdqa %xmm1,%xmm4
- vmovdqa %xmm2,%xmm5
- subq $0x10,%rcx
- jz .Ltail_avx
-
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -32(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 16-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vpsrldq $8,%xmm7,%xmm7
- subq $0x10,%rcx
- jz .Ltail_avx
-
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -48(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 48-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vmovdqu 80-64(%rsi),%xmm7
- subq $0x10,%rcx
- jz .Ltail_avx
-
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -64(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 64-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vpsrldq $8,%xmm7,%xmm7
- subq $0x10,%rcx
- jz .Ltail_avx
-
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -80(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 96-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vmovdqu 128-64(%rsi),%xmm7
- subq $0x10,%rcx
- jz .Ltail_avx
-
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -96(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 112-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vpsrldq $8,%xmm7,%xmm7
- subq $0x10,%rcx
- jz .Ltail_avx
-
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vmovdqu -112(%rdx),%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vmovdqu 144-64(%rsi),%xmm6
- vpshufb %xmm13,%xmm14,%xmm15
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
- vmovq 184-64(%rsi),%xmm7
- subq $0x10,%rcx
- jmp .Ltail_avx
-
-.align 32
-.Ltail_avx:
- vpxor %xmm10,%xmm15,%xmm15
-.Ltail_no_xor_avx:
- vpunpckhqdq %xmm15,%xmm15,%xmm8
- vpxor %xmm0,%xmm3,%xmm3
- vpclmulqdq $0x00,%xmm6,%xmm15,%xmm0
- vpxor %xmm15,%xmm8,%xmm8
- vpxor %xmm1,%xmm4,%xmm4
- vpclmulqdq $0x11,%xmm6,%xmm15,%xmm1
- vpxor %xmm2,%xmm5,%xmm5
- vpclmulqdq $0x00,%xmm7,%xmm8,%xmm2
-
- vmovdqu (%r10),%xmm12
-
- vpxor %xmm0,%xmm3,%xmm10
- vpxor %xmm1,%xmm4,%xmm11
- vpxor %xmm2,%xmm5,%xmm5
-
- vpxor %xmm10,%xmm5,%xmm5
- vpxor %xmm11,%xmm5,%xmm5
- vpslldq $8,%xmm5,%xmm9
- vpsrldq $8,%xmm5,%xmm5
- vpxor %xmm9,%xmm10,%xmm10
- vpxor %xmm5,%xmm11,%xmm11
-
- vpclmulqdq $0x10,%xmm12,%xmm10,%xmm9
- vpalignr $8,%xmm10,%xmm10,%xmm10
- vpxor %xmm9,%xmm10,%xmm10
-
- vpclmulqdq $0x10,%xmm12,%xmm10,%xmm9
- vpalignr $8,%xmm10,%xmm10,%xmm10
- vpxor %xmm11,%xmm10,%xmm10
- vpxor %xmm9,%xmm10,%xmm10
-
- cmpq $0,%rcx
- jne .Lshort_avx
-
- vpshufb %xmm13,%xmm10,%xmm10
- vmovdqu %xmm10,(%rdi)
- vzeroupper
- .byte 0xf3,0xc3
+ jmp .L_ghash_clmul
.cfi_endproc
.size gcm_ghash_avx,.-gcm_ghash_avx
.align 64
diff --git a/secure/lib/libcrypto/amd64/keccak1600-x86_64.S b/secure/lib/libcrypto/amd64/keccak1600-x86_64.S
index 582740bd2802b..d36758807990b 100644
--- a/secure/lib/libcrypto/amd64/keccak1600-x86_64.S
+++ b/secure/lib/libcrypto/amd64/keccak1600-x86_64.S
@@ -5,6 +5,7 @@
.type __KeccakF1600,@function
.align 32
__KeccakF1600:
+.cfi_startproc
movq 60(%rdi),%rax
movq 68(%rdi),%rbx
movq 76(%rdi),%rcx
@@ -257,6 +258,7 @@ __KeccakF1600:
leaq -192(%r15),%r15
.byte 0xf3,0xc3
+.cfi_endproc
.size __KeccakF1600,.-__KeccakF1600
.type KeccakF1600,@function
diff --git a/secure/lib/libcrypto/amd64/poly1305-x86_64.S b/secure/lib/libcrypto/amd64/poly1305-x86_64.S
index 6973743427f31..d74ee9b450523 100644
--- a/secure/lib/libcrypto/amd64/poly1305-x86_64.S
+++ b/secure/lib/libcrypto/amd64/poly1305-x86_64.S
@@ -14,6 +14,7 @@
.type poly1305_init,@function
.align 32
poly1305_init:
+.cfi_startproc
xorq %rax,%rax
movq %rax,0(%rdi)
movq %rax,8(%rdi)
@@ -24,15 +25,6 @@ poly1305_init:
leaq poly1305_blocks(%rip),%r10
leaq poly1305_emit(%rip),%r11
- movq OPENSSL_ia32cap_P+4(%rip),%r9
- leaq poly1305_blocks_avx(%rip),%rax
- leaq poly1305_emit_avx(%rip),%rcx
- btq $28,%r9
- cmovcq %rax,%r10
- cmovcq %rcx,%r11
- leaq poly1305_blocks_avx2(%rip),%rax
- btq $37,%r9
- cmovcq %rax,%r10
movq $0x0ffffffc0fffffff,%rax
movq $0x0ffffffc0ffffffc,%rcx
andq 0(%rsi),%rax
@@ -44,6 +36,7 @@ poly1305_init:
movl $1,%eax
.Lno_key:
.byte 0xf3,0xc3
+.cfi_endproc
.size poly1305_init,.-poly1305_init
.type poly1305_blocks,@function
@@ -164,6 +157,7 @@ poly1305_blocks:
.type poly1305_emit,@function
.align 32
poly1305_emit:
+.cfi_startproc
.Lemit:
movq 0(%rdi),%r8
movq 8(%rdi),%r9
@@ -184,1783 +178,15 @@ poly1305_emit:
movq %rcx,8(%rsi)
.byte 0xf3,0xc3
-.size poly1305_emit,.-poly1305_emit
-.type __poly1305_block,@function
-.align 32
-__poly1305_block:
- mulq %r14
- movq %rax,%r9
- movq %r11,%rax
- movq %rdx,%r10
-
- mulq %r14
- movq %rax,%r14
- movq %r11,%rax
- movq %rdx,%r8
-
- mulq %rbx
- addq %rax,%r9
- movq %r13,%rax
- adcq %rdx,%r10
-
- mulq %rbx
- movq %rbp,%rbx
- addq %rax,%r14
- adcq %rdx,%r8
-
- imulq %r13,%rbx
- addq %rbx,%r9
- movq %r8,%rbx
- adcq $0,%r10
-
- imulq %r11,%rbp
- addq %r9,%rbx
- movq $-4,%rax
- adcq %rbp,%r10
-
- andq %r10,%rax
- movq %r10,%rbp
- shrq $2,%r10
- andq $3,%rbp
- addq %r10,%rax
- addq %rax,%r14
- adcq $0,%rbx
- adcq $0,%rbp
- .byte 0xf3,0xc3
-.size __poly1305_block,.-__poly1305_block
-
-.type __poly1305_init_avx,@function
-.align 32
-__poly1305_init_avx:
- movq %r11,%r14
- movq %r12,%rbx
- xorq %rbp,%rbp
-
- leaq 48+64(%rdi),%rdi
-
- movq %r12,%rax
- call __poly1305_block
-
- movl $0x3ffffff,%eax
- movl $0x3ffffff,%edx
- movq %r14,%r8
- andl %r14d,%eax
- movq %r11,%r9
- andl %r11d,%edx
- movl %eax,-64(%rdi)
- shrq $26,%r8
- movl %edx,-60(%rdi)
- shrq $26,%r9
-
- movl $0x3ffffff,%eax
- movl $0x3ffffff,%edx
- andl %r8d,%eax
- andl %r9d,%edx
- movl %eax,-48(%rdi)
- leal (%rax,%rax,4),%eax
- movl %edx,-44(%rdi)
- leal (%rdx,%rdx,4),%edx
- movl %eax,-32(%rdi)
- shrq $26,%r8
- movl %edx,-28(%rdi)
- shrq $26,%r9
-
- movq %rbx,%rax
- movq %r12,%rdx
- shlq $12,%rax
- shlq $12,%rdx
- orq %r8,%rax
- orq %r9,%rdx
- andl $0x3ffffff,%eax
- andl $0x3ffffff,%edx
- movl %eax,-16(%rdi)
- leal (%rax,%rax,4),%eax
- movl %edx,-12(%rdi)
- leal (%rdx,%rdx,4),%edx
- movl %eax,0(%rdi)
- movq %rbx,%r8
- movl %edx,4(%rdi)
- movq %r12,%r9
-
- movl $0x3ffffff,%eax
- movl $0x3ffffff,%edx
- shrq $14,%r8
- shrq $14,%r9
- andl %r8d,%eax
- andl %r9d,%edx
- movl %eax,16(%rdi)
- leal (%rax,%rax,4),%eax
- movl %edx,20(%rdi)
- leal (%rdx,%rdx,4),%edx
- movl %eax,32(%rdi)
- shrq $26,%r8
- movl %edx,36(%rdi)
- shrq $26,%r9
-
- movq %rbp,%rax
- shlq $24,%rax
- orq %rax,%r8
- movl %r8d,48(%rdi)
- leaq (%r8,%r8,4),%r8
- movl %r9d,52(%rdi)
- leaq (%r9,%r9,4),%r9
- movl %r8d,64(%rdi)
- movl %r9d,68(%rdi)
-
- movq %r12,%rax
- call __poly1305_block
-
- movl $0x3ffffff,%eax
- movq %r14,%r8
- andl %r14d,%eax
- shrq $26,%r8
- movl %eax,-52(%rdi)
-
- movl $0x3ffffff,%edx
- andl %r8d,%edx
- movl %edx,-36(%rdi)
- leal (%rdx,%rdx,4),%edx
- shrq $26,%r8
- movl %edx,-20(%rdi)
-
- movq %rbx,%rax
- shlq $12,%rax
- orq %r8,%rax
- andl $0x3ffffff,%eax
- movl %eax,-4(%rdi)
- leal (%rax,%rax,4),%eax
- movq %rbx,%r8
- movl %eax,12(%rdi)
-
- movl $0x3ffffff,%edx
- shrq $14,%r8
- andl %r8d,%edx
- movl %edx,28(%rdi)
- leal (%rdx,%rdx,4),%edx
- shrq $26,%r8
- movl %edx,44(%rdi)
-
- movq %rbp,%rax
- shlq $24,%rax
- orq %rax,%r8
- movl %r8d,60(%rdi)
- leaq (%r8,%r8,4),%r8
- movl %r8d,76(%rdi)
-
- movq %r12,%rax
- call __poly1305_block
-
- movl $0x3ffffff,%eax
- movq %r14,%r8
- andl %r14d,%eax
- shrq $26,%r8
- movl %eax,-56(%rdi)
-
- movl $0x3ffffff,%edx
- andl %r8d,%edx
- movl %edx,-40(%rdi)
- leal (%rdx,%rdx,4),%edx
- shrq $26,%r8
- movl %edx,-24(%rdi)
-
- movq %rbx,%rax
- shlq $12,%rax
- orq %r8,%rax
- andl $0x3ffffff,%eax
- movl %eax,-8(%rdi)
- leal (%rax,%rax,4),%eax
- movq %rbx,%r8
- movl %eax,8(%rdi)
-
- movl $0x3ffffff,%edx
- shrq $14,%r8
- andl %r8d,%edx
- movl %edx,24(%rdi)
- leal (%rdx,%rdx,4),%edx
- shrq $26,%r8
- movl %edx,40(%rdi)
-
- movq %rbp,%rax
- shlq $24,%rax
- orq %rax,%r8
- movl %r8d,56(%rdi)
- leaq (%r8,%r8,4),%r8
- movl %r8d,72(%rdi)
-
- leaq -48-64(%rdi),%rdi
- .byte 0xf3,0xc3
-.size __poly1305_init_avx,.-__poly1305_init_avx
-
-.type poly1305_blocks_avx,@function
-.align 32
-poly1305_blocks_avx:
-.cfi_startproc
- movl 20(%rdi),%r8d
- cmpq $128,%rdx
- jae .Lblocks_avx
- testl %r8d,%r8d
- jz .Lblocks
-
-.Lblocks_avx:
- andq $-16,%rdx
- jz .Lno_data_avx
-
- vzeroupper
-
- testl %r8d,%r8d
- jz .Lbase2_64_avx
-
- testq $31,%rdx
- jz .Leven_avx
-
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
-.Lblocks_avx_body:
-
- movq %rdx,%r15
-
- movq 0(%rdi),%r8
- movq 8(%rdi),%r9
- movl 16(%rdi),%ebp
-
- movq 24(%rdi),%r11
- movq 32(%rdi),%r13
-
-
- movl %r8d,%r14d
- andq $-2147483648,%r8
- movq %r9,%r12
- movl %r9d,%ebx
- andq $-2147483648,%r9
-
- shrq $6,%r8
- shlq $52,%r12
- addq %r8,%r14
- shrq $12,%rbx
- shrq $18,%r9
- addq %r12,%r14
- adcq %r9,%rbx
-
- movq %rbp,%r8
- shlq $40,%r8
- shrq $24,%rbp
- addq %r8,%rbx
- adcq $0,%rbp
-
- movq $-4,%r9
- movq %rbp,%r8
- andq %rbp,%r9
- shrq $2,%r8
- andq $3,%rbp
- addq %r9,%r8
- addq %r8,%r14
- adcq $0,%rbx
- adcq $0,%rbp
-
- movq %r13,%r12
- movq %r13,%rax
- shrq $2,%r13
- addq %r12,%r13
-
- addq 0(%rsi),%r14
- adcq 8(%rsi),%rbx
- leaq 16(%rsi),%rsi
- adcq %rcx,%rbp
-
- call __poly1305_block
-
- testq %rcx,%rcx
- jz .Lstore_base2_64_avx
-
-
- movq %r14,%rax
- movq %r14,%rdx
- shrq $52,%r14
- movq %rbx,%r11
- movq %rbx,%r12
- shrq $26,%rdx
- andq $0x3ffffff,%rax
- shlq $12,%r11
- andq $0x3ffffff,%rdx
- shrq $14,%rbx
- orq %r11,%r14
- shlq $24,%rbp
- andq $0x3ffffff,%r14
- shrq $40,%r12
- andq $0x3ffffff,%rbx
- orq %r12,%rbp
-
- subq $16,%r15
- jz .Lstore_base2_26_avx
-
- vmovd %eax,%xmm0
- vmovd %edx,%xmm1
- vmovd %r14d,%xmm2
- vmovd %ebx,%xmm3
- vmovd %ebp,%xmm4
- jmp .Lproceed_avx
-
-.align 32
-.Lstore_base2_64_avx:
- movq %r14,0(%rdi)
- movq %rbx,8(%rdi)
- movq %rbp,16(%rdi)
- jmp .Ldone_avx
-
-.align 16
-.Lstore_base2_26_avx:
- movl %eax,0(%rdi)
- movl %edx,4(%rdi)
- movl %r14d,8(%rdi)
- movl %ebx,12(%rdi)
- movl %ebp,16(%rdi)
-.align 16
-.Ldone_avx:
- movq 0(%rsp),%r15
-.cfi_restore %r15
- movq 8(%rsp),%r14
-.cfi_restore %r14
- movq 16(%rsp),%r13
-.cfi_restore %r13
- movq 24(%rsp),%r12
-.cfi_restore %r12
- movq 32(%rsp),%rbp
-.cfi_restore %rbp
- movq 40(%rsp),%rbx
-.cfi_restore %rbx
- leaq 48(%rsp),%rsp
-.cfi_adjust_cfa_offset -48
-.Lno_data_avx:
-.Lblocks_avx_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-
-.align 32
-.Lbase2_64_avx:
-.cfi_startproc
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
-.Lbase2_64_avx_body:
-
- movq %rdx,%r15
-
- movq 24(%rdi),%r11
- movq 32(%rdi),%r13
-
- movq 0(%rdi),%r14
- movq 8(%rdi),%rbx
- movl 16(%rdi),%ebp
-
- movq %r13,%r12
- movq %r13,%rax
- shrq $2,%r13
- addq %r12,%r13
-
- testq $31,%rdx
- jz .Linit_avx
-
- addq 0(%rsi),%r14
- adcq 8(%rsi),%rbx
- leaq 16(%rsi),%rsi
- adcq %rcx,%rbp
- subq $16,%r15
-
- call __poly1305_block
-
-.Linit_avx:
-
- movq %r14,%rax
- movq %r14,%rdx
- shrq $52,%r14
- movq %rbx,%r8
- movq %rbx,%r9
- shrq $26,%rdx
- andq $0x3ffffff,%rax
- shlq $12,%r8
- andq $0x3ffffff,%rdx
- shrq $14,%rbx
- orq %r8,%r14
- shlq $24,%rbp
- andq $0x3ffffff,%r14
- shrq $40,%r9
- andq $0x3ffffff,%rbx
- orq %r9,%rbp
-
- vmovd %eax,%xmm0
- vmovd %edx,%xmm1
- vmovd %r14d,%xmm2
- vmovd %ebx,%xmm3
- vmovd %ebp,%xmm4
- movl $1,20(%rdi)
-
- call __poly1305_init_avx
-
-.Lproceed_avx:
- movq %r15,%rdx
-
- movq 0(%rsp),%r15
-.cfi_restore %r15
- movq 8(%rsp),%r14
-.cfi_restore %r14
- movq 16(%rsp),%r13
-.cfi_restore %r13
- movq 24(%rsp),%r12
-.cfi_restore %r12
- movq 32(%rsp),%rbp
-.cfi_restore %rbp
- movq 40(%rsp),%rbx
-.cfi_restore %rbx
- leaq 48(%rsp),%rax
- leaq 48(%rsp),%rsp
-.cfi_adjust_cfa_offset -48
-.Lbase2_64_avx_epilogue:
- jmp .Ldo_avx
-.cfi_endproc
-
-.align 32
-.Leven_avx:
-.cfi_startproc
- vmovd 0(%rdi),%xmm0
- vmovd 4(%rdi),%xmm1
- vmovd 8(%rdi),%xmm2
- vmovd 12(%rdi),%xmm3
- vmovd 16(%rdi),%xmm4
-
-.Ldo_avx:
- leaq -88(%rsp),%r11
-.cfi_def_cfa %r11,0x60
- subq $0x178,%rsp
- subq $64,%rdx
- leaq -32(%rsi),%rax
- cmovcq %rax,%rsi
-
- vmovdqu 48(%rdi),%xmm14
- leaq 112(%rdi),%rdi
- leaq .Lconst(%rip),%rcx
-
-
-
- vmovdqu 32(%rsi),%xmm5
- vmovdqu 48(%rsi),%xmm6
- vmovdqa 64(%rcx),%xmm15
-
- vpsrldq $6,%xmm5,%xmm7
- vpsrldq $6,%xmm6,%xmm8
- vpunpckhqdq %xmm6,%xmm5,%xmm9
- vpunpcklqdq %xmm6,%xmm5,%xmm5
- vpunpcklqdq %xmm8,%xmm7,%xmm8
-
- vpsrlq $40,%xmm9,%xmm9
- vpsrlq $26,%xmm5,%xmm6
- vpand %xmm15,%xmm5,%xmm5
- vpsrlq $4,%xmm8,%xmm7
- vpand %xmm15,%xmm6,%xmm6
- vpsrlq $30,%xmm8,%xmm8
- vpand %xmm15,%xmm7,%xmm7
- vpand %xmm15,%xmm8,%xmm8
- vpor 32(%rcx),%xmm9,%xmm9
-
- jbe .Lskip_loop_avx
-
-
- vmovdqu -48(%rdi),%xmm11
- vmovdqu -32(%rdi),%xmm12
- vpshufd $0xEE,%xmm14,%xmm13
- vpshufd $0x44,%xmm14,%xmm10
- vmovdqa %xmm13,-144(%r11)
- vmovdqa %xmm10,0(%rsp)
- vpshufd $0xEE,%xmm11,%xmm14
- vmovdqu -16(%rdi),%xmm10
- vpshufd $0x44,%xmm11,%xmm11
- vmovdqa %xmm14,-128(%r11)
- vmovdqa %xmm11,16(%rsp)
- vpshufd $0xEE,%xmm12,%xmm13
- vmovdqu 0(%rdi),%xmm11
- vpshufd $0x44,%xmm12,%xmm12
- vmovdqa %xmm13,-112(%r11)
- vmovdqa %xmm12,32(%rsp)
- vpshufd $0xEE,%xmm10,%xmm14
- vmovdqu 16(%rdi),%xmm12
- vpshufd $0x44,%xmm10,%xmm10
- vmovdqa %xmm14,-96(%r11)
- vmovdqa %xmm10,48(%rsp)
- vpshufd $0xEE,%xmm11,%xmm13
- vmovdqu 32(%rdi),%xmm10
- vpshufd $0x44,%xmm11,%xmm11
- vmovdqa %xmm13,-80(%r11)
- vmovdqa %xmm11,64(%rsp)
- vpshufd $0xEE,%xmm12,%xmm14
- vmovdqu 48(%rdi),%xmm11
- vpshufd $0x44,%xmm12,%xmm12
- vmovdqa %xmm14,-64(%r11)
- vmovdqa %xmm12,80(%rsp)
- vpshufd $0xEE,%xmm10,%xmm13
- vmovdqu 64(%rdi),%xmm12
- vpshufd $0x44,%xmm10,%xmm10
- vmovdqa %xmm13,-48(%r11)
- vmovdqa %xmm10,96(%rsp)
- vpshufd $0xEE,%xmm11,%xmm14
- vpshufd $0x44,%xmm11,%xmm11
- vmovdqa %xmm14,-32(%r11)
- vmovdqa %xmm11,112(%rsp)
- vpshufd $0xEE,%xmm12,%xmm13
- vmovdqa 0(%rsp),%xmm14
- vpshufd $0x44,%xmm12,%xmm12
- vmovdqa %xmm13,-16(%r11)
- vmovdqa %xmm12,128(%rsp)
-
- jmp .Loop_avx
-
-.align 32
-.Loop_avx:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- vpmuludq %xmm5,%xmm14,%xmm10
- vpmuludq %xmm6,%xmm14,%xmm11
- vmovdqa %xmm2,32(%r11)
- vpmuludq %xmm7,%xmm14,%xmm12
- vmovdqa 16(%rsp),%xmm2
- vpmuludq %xmm8,%xmm14,%xmm13
- vpmuludq %xmm9,%xmm14,%xmm14
-
- vmovdqa %xmm0,0(%r11)
- vpmuludq 32(%rsp),%xmm9,%xmm0
- vmovdqa %xmm1,16(%r11)
- vpmuludq %xmm8,%xmm2,%xmm1
- vpaddq %xmm0,%xmm10,%xmm10
- vpaddq %xmm1,%xmm14,%xmm14
- vmovdqa %xmm3,48(%r11)
- vpmuludq %xmm7,%xmm2,%xmm0
- vpmuludq %xmm6,%xmm2,%xmm1
- vpaddq %xmm0,%xmm13,%xmm13
- vmovdqa 48(%rsp),%xmm3
- vpaddq %xmm1,%xmm12,%xmm12
- vmovdqa %xmm4,64(%r11)
- vpmuludq %xmm5,%xmm2,%xmm2
- vpmuludq %xmm7,%xmm3,%xmm0
- vpaddq %xmm2,%xmm11,%xmm11
-
- vmovdqa 64(%rsp),%xmm4
- vpaddq %xmm0,%xmm14,%xmm14
- vpmuludq %xmm6,%xmm3,%xmm1
- vpmuludq %xmm5,%xmm3,%xmm3
- vpaddq %xmm1,%xmm13,%xmm13
- vmovdqa 80(%rsp),%xmm2
- vpaddq %xmm3,%xmm12,%xmm12
- vpmuludq %xmm9,%xmm4,%xmm0
- vpmuludq %xmm8,%xmm4,%xmm4
- vpaddq %xmm0,%xmm11,%xmm11
- vmovdqa 96(%rsp),%xmm3
- vpaddq %xmm4,%xmm10,%xmm10
-
- vmovdqa 128(%rsp),%xmm4
- vpmuludq %xmm6,%xmm2,%xmm1
- vpmuludq %xmm5,%xmm2,%xmm2
- vpaddq %xmm1,%xmm14,%xmm14
- vpaddq %xmm2,%xmm13,%xmm13
- vpmuludq %xmm9,%xmm3,%xmm0
- vpmuludq %xmm8,%xmm3,%xmm1
- vpaddq %xmm0,%xmm12,%xmm12
- vmovdqu 0(%rsi),%xmm0
- vpaddq %xmm1,%xmm11,%xmm11
- vpmuludq %xmm7,%xmm3,%xmm3
- vpmuludq %xmm7,%xmm4,%xmm7
- vpaddq %xmm3,%xmm10,%xmm10
-
- vmovdqu 16(%rsi),%xmm1
- vpaddq %xmm7,%xmm11,%xmm11
- vpmuludq %xmm8,%xmm4,%xmm8
- vpmuludq %xmm9,%xmm4,%xmm9
- vpsrldq $6,%xmm0,%xmm2
- vpaddq %xmm8,%xmm12,%xmm12
- vpaddq %xmm9,%xmm13,%xmm13
- vpsrldq $6,%xmm1,%xmm3
- vpmuludq 112(%rsp),%xmm5,%xmm9
- vpmuludq %xmm6,%xmm4,%xmm5
- vpunpckhqdq %xmm1,%xmm0,%xmm4
- vpaddq %xmm9,%xmm14,%xmm14
- vmovdqa -144(%r11),%xmm9
- vpaddq %xmm5,%xmm10,%xmm10
-
- vpunpcklqdq %xmm1,%xmm0,%xmm0
- vpunpcklqdq %xmm3,%xmm2,%xmm3
-
-
- vpsrldq $5,%xmm4,%xmm4
- vpsrlq $26,%xmm0,%xmm1
- vpand %xmm15,%xmm0,%xmm0
- vpsrlq $4,%xmm3,%xmm2
- vpand %xmm15,%xmm1,%xmm1
- vpand 0(%rcx),%xmm4,%xmm4
- vpsrlq $30,%xmm3,%xmm3
- vpand %xmm15,%xmm2,%xmm2
- vpand %xmm15,%xmm3,%xmm3
- vpor 32(%rcx),%xmm4,%xmm4
-
- vpaddq 0(%r11),%xmm0,%xmm0
- vpaddq 16(%r11),%xmm1,%xmm1
- vpaddq 32(%r11),%xmm2,%xmm2
- vpaddq 48(%r11),%xmm3,%xmm3
- vpaddq 64(%r11),%xmm4,%xmm4
-
- leaq 32(%rsi),%rax
- leaq 64(%rsi),%rsi
- subq $64,%rdx
- cmovcq %rax,%rsi
-
-
-
-
-
-
-
-
-
-
- vpmuludq %xmm0,%xmm9,%xmm5
- vpmuludq %xmm1,%xmm9,%xmm6
- vpaddq %xmm5,%xmm10,%xmm10
- vpaddq %xmm6,%xmm11,%xmm11
- vmovdqa -128(%r11),%xmm7
- vpmuludq %xmm2,%xmm9,%xmm5
- vpmuludq %xmm3,%xmm9,%xmm6
- vpaddq %xmm5,%xmm12,%xmm12
- vpaddq %xmm6,%xmm13,%xmm13
- vpmuludq %xmm4,%xmm9,%xmm9
- vpmuludq -112(%r11),%xmm4,%xmm5
- vpaddq %xmm9,%xmm14,%xmm14
-
- vpaddq %xmm5,%xmm10,%xmm10
- vpmuludq %xmm2,%xmm7,%xmm6
- vpmuludq %xmm3,%xmm7,%xmm5
- vpaddq %xmm6,%xmm13,%xmm13
- vmovdqa -96(%r11),%xmm8
- vpaddq %xmm5,%xmm14,%xmm14
- vpmuludq %xmm1,%xmm7,%xmm6
- vpmuludq %xmm0,%xmm7,%xmm7
- vpaddq %xmm6,%xmm12,%xmm12
- vpaddq %xmm7,%xmm11,%xmm11
-
- vmovdqa -80(%r11),%xmm9
- vpmuludq %xmm2,%xmm8,%xmm5
- vpmuludq %xmm1,%xmm8,%xmm6
- vpaddq %xmm5,%xmm14,%xmm14
- vpaddq %xmm6,%xmm13,%xmm13
- vmovdqa -64(%r11),%xmm7
- vpmuludq %xmm0,%xmm8,%xmm8
- vpmuludq %xmm4,%xmm9,%xmm5
- vpaddq %xmm8,%xmm12,%xmm12
- vpaddq %xmm5,%xmm11,%xmm11
- vmovdqa -48(%r11),%xmm8
- vpmuludq %xmm3,%xmm9,%xmm9
- vpmuludq %xmm1,%xmm7,%xmm6
- vpaddq %xmm9,%xmm10,%xmm10
-
- vmovdqa -16(%r11),%xmm9
- vpaddq %xmm6,%xmm14,%xmm14
- vpmuludq %xmm0,%xmm7,%xmm7
- vpmuludq %xmm4,%xmm8,%xmm5
- vpaddq %xmm7,%xmm13,%xmm13
- vpaddq %xmm5,%xmm12,%xmm12
- vmovdqu 32(%rsi),%xmm5
- vpmuludq %xmm3,%xmm8,%xmm7
- vpmuludq %xmm2,%xmm8,%xmm8
- vpaddq %xmm7,%xmm11,%xmm11
- vmovdqu 48(%rsi),%xmm6
- vpaddq %xmm8,%xmm10,%xmm10
-
- vpmuludq %xmm2,%xmm9,%xmm2
- vpmuludq %xmm3,%xmm9,%xmm3
- vpsrldq $6,%xmm5,%xmm7
- vpaddq %xmm2,%xmm11,%xmm11
- vpmuludq %xmm4,%xmm9,%xmm4
- vpsrldq $6,%xmm6,%xmm8
- vpaddq %xmm3,%xmm12,%xmm2
- vpaddq %xmm4,%xmm13,%xmm3
- vpmuludq -32(%r11),%xmm0,%xmm4
- vpmuludq %xmm1,%xmm9,%xmm0
- vpunpckhqdq %xmm6,%xmm5,%xmm9
- vpaddq %xmm4,%xmm14,%xmm4
- vpaddq %xmm0,%xmm10,%xmm0
-
- vpunpcklqdq %xmm6,%xmm5,%xmm5
- vpunpcklqdq %xmm8,%xmm7,%xmm8
-
-
- vpsrldq $5,%xmm9,%xmm9
- vpsrlq $26,%xmm5,%xmm6
- vmovdqa 0(%rsp),%xmm14
- vpand %xmm15,%xmm5,%xmm5
- vpsrlq $4,%xmm8,%xmm7
- vpand %xmm15,%xmm6,%xmm6
- vpand 0(%rcx),%xmm9,%xmm9
- vpsrlq $30,%xmm8,%xmm8
- vpand %xmm15,%xmm7,%xmm7
- vpand %xmm15,%xmm8,%xmm8
- vpor 32(%rcx),%xmm9,%xmm9
-
-
-
-
-
- vpsrlq $26,%xmm3,%xmm13
- vpand %xmm15,%xmm3,%xmm3
- vpaddq %xmm13,%xmm4,%xmm4
-
- vpsrlq $26,%xmm0,%xmm10
- vpand %xmm15,%xmm0,%xmm0
- vpaddq %xmm10,%xmm11,%xmm1
-
- vpsrlq $26,%xmm4,%xmm10
- vpand %xmm15,%xmm4,%xmm4
-
- vpsrlq $26,%xmm1,%xmm11
- vpand %xmm15,%xmm1,%xmm1
- vpaddq %xmm11,%xmm2,%xmm2
-
- vpaddq %xmm10,%xmm0,%xmm0
- vpsllq $2,%xmm10,%xmm10
- vpaddq %xmm10,%xmm0,%xmm0
-
- vpsrlq $26,%xmm2,%xmm12
- vpand %xmm15,%xmm2,%xmm2
- vpaddq %xmm12,%xmm3,%xmm3
-
- vpsrlq $26,%xmm0,%xmm10
- vpand %xmm15,%xmm0,%xmm0
- vpaddq %xmm10,%xmm1,%xmm1
-
- vpsrlq $26,%xmm3,%xmm13
- vpand %xmm15,%xmm3,%xmm3
- vpaddq %xmm13,%xmm4,%xmm4
-
- ja .Loop_avx
-
-.Lskip_loop_avx:
-
-
-
- vpshufd $0x10,%xmm14,%xmm14
- addq $32,%rdx
- jnz .Long_tail_avx
-
- vpaddq %xmm2,%xmm7,%xmm7
- vpaddq %xmm0,%xmm5,%xmm5
- vpaddq %xmm1,%xmm6,%xmm6
- vpaddq %xmm3,%xmm8,%xmm8
- vpaddq %xmm4,%xmm9,%xmm9
-
-.Long_tail_avx:
- vmovdqa %xmm2,32(%r11)
- vmovdqa %xmm0,0(%r11)
- vmovdqa %xmm1,16(%r11)
- vmovdqa %xmm3,48(%r11)
- vmovdqa %xmm4,64(%r11)
-
-
-
-
-
-
-
- vpmuludq %xmm7,%xmm14,%xmm12
- vpmuludq %xmm5,%xmm14,%xmm10
- vpshufd $0x10,-48(%rdi),%xmm2
- vpmuludq %xmm6,%xmm14,%xmm11
- vpmuludq %xmm8,%xmm14,%xmm13
- vpmuludq %xmm9,%xmm14,%xmm14
-
- vpmuludq %xmm8,%xmm2,%xmm0
- vpaddq %xmm0,%xmm14,%xmm14
- vpshufd $0x10,-32(%rdi),%xmm3
- vpmuludq %xmm7,%xmm2,%xmm1
- vpaddq %xmm1,%xmm13,%xmm13
- vpshufd $0x10,-16(%rdi),%xmm4
- vpmuludq %xmm6,%xmm2,%xmm0
- vpaddq %xmm0,%xmm12,%xmm12
- vpmuludq %xmm5,%xmm2,%xmm2
- vpaddq %xmm2,%xmm11,%xmm11
- vpmuludq %xmm9,%xmm3,%xmm3
- vpaddq %xmm3,%xmm10,%xmm10
-
- vpshufd $0x10,0(%rdi),%xmm2
- vpmuludq %xmm7,%xmm4,%xmm1
- vpaddq %xmm1,%xmm14,%xmm14
- vpmuludq %xmm6,%xmm4,%xmm0
- vpaddq %xmm0,%xmm13,%xmm13
- vpshufd $0x10,16(%rdi),%xmm3
- vpmuludq %xmm5,%xmm4,%xmm4
- vpaddq %xmm4,%xmm12,%xmm12
- vpmuludq %xmm9,%xmm2,%xmm1
- vpaddq %xmm1,%xmm11,%xmm11
- vpshufd $0x10,32(%rdi),%xmm4
- vpmuludq %xmm8,%xmm2,%xmm2
- vpaddq %xmm2,%xmm10,%xmm10
-
- vpmuludq %xmm6,%xmm3,%xmm0
- vpaddq %xmm0,%xmm14,%xmm14
- vpmuludq %xmm5,%xmm3,%xmm3
- vpaddq %xmm3,%xmm13,%xmm13
- vpshufd $0x10,48(%rdi),%xmm2
- vpmuludq %xmm9,%xmm4,%xmm1
- vpaddq %xmm1,%xmm12,%xmm12
- vpshufd $0x10,64(%rdi),%xmm3
- vpmuludq %xmm8,%xmm4,%xmm0
- vpaddq %xmm0,%xmm11,%xmm11
- vpmuludq %xmm7,%xmm4,%xmm4
- vpaddq %xmm4,%xmm10,%xmm10
-
- vpmuludq %xmm5,%xmm2,%xmm2
- vpaddq %xmm2,%xmm14,%xmm14
- vpmuludq %xmm9,%xmm3,%xmm1
- vpaddq %xmm1,%xmm13,%xmm13
- vpmuludq %xmm8,%xmm3,%xmm0
- vpaddq %xmm0,%xmm12,%xmm12
- vpmuludq %xmm7,%xmm3,%xmm1
- vpaddq %xmm1,%xmm11,%xmm11
- vpmuludq %xmm6,%xmm3,%xmm3
- vpaddq %xmm3,%xmm10,%xmm10
-
- jz .Lshort_tail_avx
-
- vmovdqu 0(%rsi),%xmm0
- vmovdqu 16(%rsi),%xmm1
-
- vpsrldq $6,%xmm0,%xmm2
- vpsrldq $6,%xmm1,%xmm3
- vpunpckhqdq %xmm1,%xmm0,%xmm4
- vpunpcklqdq %xmm1,%xmm0,%xmm0
- vpunpcklqdq %xmm3,%xmm2,%xmm3
-
- vpsrlq $40,%xmm4,%xmm4
- vpsrlq $26,%xmm0,%xmm1
- vpand %xmm15,%xmm0,%xmm0
- vpsrlq $4,%xmm3,%xmm2
- vpand %xmm15,%xmm1,%xmm1
- vpsrlq $30,%xmm3,%xmm3
- vpand %xmm15,%xmm2,%xmm2
- vpand %xmm15,%xmm3,%xmm3
- vpor 32(%rcx),%xmm4,%xmm4
-
- vpshufd $0x32,-64(%rdi),%xmm9
- vpaddq 0(%r11),%xmm0,%xmm0
- vpaddq 16(%r11),%xmm1,%xmm1
- vpaddq 32(%r11),%xmm2,%xmm2
- vpaddq 48(%r11),%xmm3,%xmm3
- vpaddq 64(%r11),%xmm4,%xmm4
-
-
-
-
- vpmuludq %xmm0,%xmm9,%xmm5
- vpaddq %xmm5,%xmm10,%xmm10
- vpmuludq %xmm1,%xmm9,%xmm6
- vpaddq %xmm6,%xmm11,%xmm11
- vpmuludq %xmm2,%xmm9,%xmm5
- vpaddq %xmm5,%xmm12,%xmm12
- vpshufd $0x32,-48(%rdi),%xmm7
- vpmuludq %xmm3,%xmm9,%xmm6
- vpaddq %xmm6,%xmm13,%xmm13
- vpmuludq %xmm4,%xmm9,%xmm9
- vpaddq %xmm9,%xmm14,%xmm14
-
- vpmuludq %xmm3,%xmm7,%xmm5
- vpaddq %xmm5,%xmm14,%xmm14
- vpshufd $0x32,-32(%rdi),%xmm8
- vpmuludq %xmm2,%xmm7,%xmm6
- vpaddq %xmm6,%xmm13,%xmm13
- vpshufd $0x32,-16(%rdi),%xmm9
- vpmuludq %xmm1,%xmm7,%xmm5
- vpaddq %xmm5,%xmm12,%xmm12
- vpmuludq %xmm0,%xmm7,%xmm7
- vpaddq %xmm7,%xmm11,%xmm11
- vpmuludq %xmm4,%xmm8,%xmm8
- vpaddq %xmm8,%xmm10,%xmm10
-
- vpshufd $0x32,0(%rdi),%xmm7
- vpmuludq %xmm2,%xmm9,%xmm6
- vpaddq %xmm6,%xmm14,%xmm14
- vpmuludq %xmm1,%xmm9,%xmm5
- vpaddq %xmm5,%xmm13,%xmm13
- vpshufd $0x32,16(%rdi),%xmm8
- vpmuludq %xmm0,%xmm9,%xmm9
- vpaddq %xmm9,%xmm12,%xmm12
- vpmuludq %xmm4,%xmm7,%xmm6
- vpaddq %xmm6,%xmm11,%xmm11
- vpshufd $0x32,32(%rdi),%xmm9
- vpmuludq %xmm3,%xmm7,%xmm7
- vpaddq %xmm7,%xmm10,%xmm10
-
- vpmuludq %xmm1,%xmm8,%xmm5
- vpaddq %xmm5,%xmm14,%xmm14
- vpmuludq %xmm0,%xmm8,%xmm8
- vpaddq %xmm8,%xmm13,%xmm13
- vpshufd $0x32,48(%rdi),%xmm7
- vpmuludq %xmm4,%xmm9,%xmm6
- vpaddq %xmm6,%xmm12,%xmm12
- vpshufd $0x32,64(%rdi),%xmm8
- vpmuludq %xmm3,%xmm9,%xmm5
- vpaddq %xmm5,%xmm11,%xmm11
- vpmuludq %xmm2,%xmm9,%xmm9
- vpaddq %xmm9,%xmm10,%xmm10
-
- vpmuludq %xmm0,%xmm7,%xmm7
- vpaddq %xmm7,%xmm14,%xmm14
- vpmuludq %xmm4,%xmm8,%xmm6
- vpaddq %xmm6,%xmm13,%xmm13
- vpmuludq %xmm3,%xmm8,%xmm5
- vpaddq %xmm5,%xmm12,%xmm12
- vpmuludq %xmm2,%xmm8,%xmm6
- vpaddq %xmm6,%xmm11,%xmm11
- vpmuludq %xmm1,%xmm8,%xmm8
- vpaddq %xmm8,%xmm10,%xmm10
-
-.Lshort_tail_avx:
-
-
-
- vpsrldq $8,%xmm14,%xmm9
- vpsrldq $8,%xmm13,%xmm8
- vpsrldq $8,%xmm11,%xmm6
- vpsrldq $8,%xmm10,%xmm5
- vpsrldq $8,%xmm12,%xmm7
- vpaddq %xmm8,%xmm13,%xmm13
- vpaddq %xmm9,%xmm14,%xmm14
- vpaddq %xmm5,%xmm10,%xmm10
- vpaddq %xmm6,%xmm11,%xmm11
- vpaddq %xmm7,%xmm12,%xmm12
-
-
-
-
- vpsrlq $26,%xmm13,%xmm3
- vpand %xmm15,%xmm13,%xmm13
- vpaddq %xmm3,%xmm14,%xmm14
-
- vpsrlq $26,%xmm10,%xmm0
- vpand %xmm15,%xmm10,%xmm10
- vpaddq %xmm0,%xmm11,%xmm11
-
- vpsrlq $26,%xmm14,%xmm4
- vpand %xmm15,%xmm14,%xmm14
-
- vpsrlq $26,%xmm11,%xmm1
- vpand %xmm15,%xmm11,%xmm11
- vpaddq %xmm1,%xmm12,%xmm12
-
- vpaddq %xmm4,%xmm10,%xmm10
- vpsllq $2,%xmm4,%xmm4
- vpaddq %xmm4,%xmm10,%xmm10
-
- vpsrlq $26,%xmm12,%xmm2
- vpand %xmm15,%xmm12,%xmm12
- vpaddq %xmm2,%xmm13,%xmm13
-
- vpsrlq $26,%xmm10,%xmm0
- vpand %xmm15,%xmm10,%xmm10
- vpaddq %xmm0,%xmm11,%xmm11
-
- vpsrlq $26,%xmm13,%xmm3
- vpand %xmm15,%xmm13,%xmm13
- vpaddq %xmm3,%xmm14,%xmm14
-
- vmovd %xmm10,-112(%rdi)
- vmovd %xmm11,-108(%rdi)
- vmovd %xmm12,-104(%rdi)
- vmovd %xmm13,-100(%rdi)
- vmovd %xmm14,-96(%rdi)
- leaq 88(%r11),%rsp
-.cfi_def_cfa %rsp,8
- vzeroupper
- .byte 0xf3,0xc3
-.cfi_endproc
-.size poly1305_blocks_avx,.-poly1305_blocks_avx
-
-.type poly1305_emit_avx,@function
-.align 32
-poly1305_emit_avx:
- cmpl $0,20(%rdi)
- je .Lemit
-
- movl 0(%rdi),%eax
- movl 4(%rdi),%ecx
- movl 8(%rdi),%r8d
- movl 12(%rdi),%r11d
- movl 16(%rdi),%r10d
-
- shlq $26,%rcx
- movq %r8,%r9
- shlq $52,%r8
- addq %rcx,%rax
- shrq $12,%r9
- addq %rax,%r8
- adcq $0,%r9
-
- shlq $14,%r11
- movq %r10,%rax
- shrq $24,%r10
- addq %r11,%r9
- shlq $40,%rax
- addq %rax,%r9
- adcq $0,%r10
-
- movq %r10,%rax
- movq %r10,%rcx
- andq $3,%r10
- shrq $2,%rax
- andq $-4,%rcx
- addq %rcx,%rax
- addq %rax,%r8
- adcq $0,%r9
- adcq $0,%r10
-
- movq %r8,%rax
- addq $5,%r8
- movq %r9,%rcx
- adcq $0,%r9
- adcq $0,%r10
- shrq $2,%r10
- cmovnzq %r8,%rax
- cmovnzq %r9,%rcx
-
- addq 0(%rdx),%rax
- adcq 8(%rdx),%rcx
- movq %rax,0(%rsi)
- movq %rcx,8(%rsi)
-
- .byte 0xf3,0xc3
-.size poly1305_emit_avx,.-poly1305_emit_avx
-.type poly1305_blocks_avx2,@function
-.align 32
-poly1305_blocks_avx2:
-.cfi_startproc
- movl 20(%rdi),%r8d
- cmpq $128,%rdx
- jae .Lblocks_avx2
- testl %r8d,%r8d
- jz .Lblocks
-
-.Lblocks_avx2:
- andq $-16,%rdx
- jz .Lno_data_avx2
-
- vzeroupper
-
- testl %r8d,%r8d
- jz .Lbase2_64_avx2
-
- testq $63,%rdx
- jz .Leven_avx2
-
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
-.Lblocks_avx2_body:
-
- movq %rdx,%r15
-
- movq 0(%rdi),%r8
- movq 8(%rdi),%r9
- movl 16(%rdi),%ebp
-
- movq 24(%rdi),%r11
- movq 32(%rdi),%r13
-
-
- movl %r8d,%r14d
- andq $-2147483648,%r8
- movq %r9,%r12
- movl %r9d,%ebx
- andq $-2147483648,%r9
-
- shrq $6,%r8
- shlq $52,%r12
- addq %r8,%r14
- shrq $12,%rbx
- shrq $18,%r9
- addq %r12,%r14
- adcq %r9,%rbx
-
- movq %rbp,%r8
- shlq $40,%r8
- shrq $24,%rbp
- addq %r8,%rbx
- adcq $0,%rbp
-
- movq $-4,%r9
- movq %rbp,%r8
- andq %rbp,%r9
- shrq $2,%r8
- andq $3,%rbp
- addq %r9,%r8
- addq %r8,%r14
- adcq $0,%rbx
- adcq $0,%rbp
-
- movq %r13,%r12
- movq %r13,%rax
- shrq $2,%r13
- addq %r12,%r13
-
-.Lbase2_26_pre_avx2:
- addq 0(%rsi),%r14
- adcq 8(%rsi),%rbx
- leaq 16(%rsi),%rsi
- adcq %rcx,%rbp
- subq $16,%r15
-
- call __poly1305_block
- movq %r12,%rax
-
- testq $63,%r15
- jnz .Lbase2_26_pre_avx2
-
- testq %rcx,%rcx
- jz .Lstore_base2_64_avx2
-
-
- movq %r14,%rax
- movq %r14,%rdx
- shrq $52,%r14
- movq %rbx,%r11
- movq %rbx,%r12
- shrq $26,%rdx
- andq $0x3ffffff,%rax
- shlq $12,%r11
- andq $0x3ffffff,%rdx
- shrq $14,%rbx
- orq %r11,%r14
- shlq $24,%rbp
- andq $0x3ffffff,%r14
- shrq $40,%r12
- andq $0x3ffffff,%rbx
- orq %r12,%rbp
-
- testq %r15,%r15
- jz .Lstore_base2_26_avx2
-
- vmovd %eax,%xmm0
- vmovd %edx,%xmm1
- vmovd %r14d,%xmm2
- vmovd %ebx,%xmm3
- vmovd %ebp,%xmm4
- jmp .Lproceed_avx2
-
-.align 32
-.Lstore_base2_64_avx2:
- movq %r14,0(%rdi)
- movq %rbx,8(%rdi)
- movq %rbp,16(%rdi)
- jmp .Ldone_avx2
-
-.align 16
-.Lstore_base2_26_avx2:
- movl %eax,0(%rdi)
- movl %edx,4(%rdi)
- movl %r14d,8(%rdi)
- movl %ebx,12(%rdi)
- movl %ebp,16(%rdi)
-.align 16
-.Ldone_avx2:
- movq 0(%rsp),%r15
-.cfi_restore %r15
- movq 8(%rsp),%r14
-.cfi_restore %r14
- movq 16(%rsp),%r13
-.cfi_restore %r13
- movq 24(%rsp),%r12
-.cfi_restore %r12
- movq 32(%rsp),%rbp
-.cfi_restore %rbp
- movq 40(%rsp),%rbx
-.cfi_restore %rbx
- leaq 48(%rsp),%rsp
-.cfi_adjust_cfa_offset -48
-.Lno_data_avx2:
-.Lblocks_avx2_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-
-.align 32
-.Lbase2_64_avx2:
-.cfi_startproc
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
-.Lbase2_64_avx2_body:
-
- movq %rdx,%r15
-
- movq 24(%rdi),%r11
- movq 32(%rdi),%r13
-
- movq 0(%rdi),%r14
- movq 8(%rdi),%rbx
- movl 16(%rdi),%ebp
-
- movq %r13,%r12
- movq %r13,%rax
- shrq $2,%r13
- addq %r12,%r13
-
- testq $63,%rdx
- jz .Linit_avx2
-
-.Lbase2_64_pre_avx2:
- addq 0(%rsi),%r14
- adcq 8(%rsi),%rbx
- leaq 16(%rsi),%rsi
- adcq %rcx,%rbp
- subq $16,%r15
-
- call __poly1305_block
- movq %r12,%rax
-
- testq $63,%r15
- jnz .Lbase2_64_pre_avx2
-
-.Linit_avx2:
-
- movq %r14,%rax
- movq %r14,%rdx
- shrq $52,%r14
- movq %rbx,%r8
- movq %rbx,%r9
- shrq $26,%rdx
- andq $0x3ffffff,%rax
- shlq $12,%r8
- andq $0x3ffffff,%rdx
- shrq $14,%rbx
- orq %r8,%r14
- shlq $24,%rbp
- andq $0x3ffffff,%r14
- shrq $40,%r9
- andq $0x3ffffff,%rbx
- orq %r9,%rbp
-
- vmovd %eax,%xmm0
- vmovd %edx,%xmm1
- vmovd %r14d,%xmm2
- vmovd %ebx,%xmm3
- vmovd %ebp,%xmm4
- movl $1,20(%rdi)
-
- call __poly1305_init_avx
-
-.Lproceed_avx2:
- movq %r15,%rdx
- movl OPENSSL_ia32cap_P+8(%rip),%r10d
- movl $3221291008,%r11d
-
- movq 0(%rsp),%r15
-.cfi_restore %r15
- movq 8(%rsp),%r14
-.cfi_restore %r14
- movq 16(%rsp),%r13
-.cfi_restore %r13
- movq 24(%rsp),%r12
-.cfi_restore %r12
- movq 32(%rsp),%rbp
-.cfi_restore %rbp
- movq 40(%rsp),%rbx
-.cfi_restore %rbx
- leaq 48(%rsp),%rax
- leaq 48(%rsp),%rsp
-.cfi_adjust_cfa_offset -48
-.Lbase2_64_avx2_epilogue:
- jmp .Ldo_avx2
-.cfi_endproc
-
-.align 32
-.Leven_avx2:
-.cfi_startproc
- movl OPENSSL_ia32cap_P+8(%rip),%r10d
- vmovd 0(%rdi),%xmm0
- vmovd 4(%rdi),%xmm1
- vmovd 8(%rdi),%xmm2
- vmovd 12(%rdi),%xmm3
- vmovd 16(%rdi),%xmm4
-
-.Ldo_avx2:
- leaq -8(%rsp),%r11
-.cfi_def_cfa %r11,16
- subq $0x128,%rsp
- leaq .Lconst(%rip),%rcx
- leaq 48+64(%rdi),%rdi
- vmovdqa 96(%rcx),%ymm7
-
-
- vmovdqu -64(%rdi),%xmm9
- andq $-512,%rsp
- vmovdqu -48(%rdi),%xmm10
- vmovdqu -32(%rdi),%xmm6
- vmovdqu -16(%rdi),%xmm11
- vmovdqu 0(%rdi),%xmm12
- vmovdqu 16(%rdi),%xmm13
- leaq 144(%rsp),%rax
- vmovdqu 32(%rdi),%xmm14
- vpermd %ymm9,%ymm7,%ymm9
- vmovdqu 48(%rdi),%xmm15
- vpermd %ymm10,%ymm7,%ymm10
- vmovdqu 64(%rdi),%xmm5
- vpermd %ymm6,%ymm7,%ymm6
- vmovdqa %ymm9,0(%rsp)
- vpermd %ymm11,%ymm7,%ymm11
- vmovdqa %ymm10,32-144(%rax)
- vpermd %ymm12,%ymm7,%ymm12
- vmovdqa %ymm6,64-144(%rax)
- vpermd %ymm13,%ymm7,%ymm13
- vmovdqa %ymm11,96-144(%rax)
- vpermd %ymm14,%ymm7,%ymm14
- vmovdqa %ymm12,128-144(%rax)
- vpermd %ymm15,%ymm7,%ymm15
- vmovdqa %ymm13,160-144(%rax)
- vpermd %ymm5,%ymm7,%ymm5
- vmovdqa %ymm14,192-144(%rax)
- vmovdqa %ymm15,224-144(%rax)
- vmovdqa %ymm5,256-144(%rax)
- vmovdqa 64(%rcx),%ymm5
-
-
-
- vmovdqu 0(%rsi),%xmm7
- vmovdqu 16(%rsi),%xmm8
- vinserti128 $1,32(%rsi),%ymm7,%ymm7
- vinserti128 $1,48(%rsi),%ymm8,%ymm8
- leaq 64(%rsi),%rsi
-
- vpsrldq $6,%ymm7,%ymm9
- vpsrldq $6,%ymm8,%ymm10
- vpunpckhqdq %ymm8,%ymm7,%ymm6
- vpunpcklqdq %ymm10,%ymm9,%ymm9
- vpunpcklqdq %ymm8,%ymm7,%ymm7
-
- vpsrlq $30,%ymm9,%ymm10
- vpsrlq $4,%ymm9,%ymm9
- vpsrlq $26,%ymm7,%ymm8
- vpsrlq $40,%ymm6,%ymm6
- vpand %ymm5,%ymm9,%ymm9
- vpand %ymm5,%ymm7,%ymm7
- vpand %ymm5,%ymm8,%ymm8
- vpand %ymm5,%ymm10,%ymm10
- vpor 32(%rcx),%ymm6,%ymm6
-
- vpaddq %ymm2,%ymm9,%ymm2
- subq $64,%rdx
- jz .Ltail_avx2
- jmp .Loop_avx2
-
-.align 32
-.Loop_avx2:
-
-
-
-
-
-
-
-
- vpaddq %ymm0,%ymm7,%ymm0
- vmovdqa 0(%rsp),%ymm7
- vpaddq %ymm1,%ymm8,%ymm1
- vmovdqa 32(%rsp),%ymm8
- vpaddq %ymm3,%ymm10,%ymm3
- vmovdqa 96(%rsp),%ymm9
- vpaddq %ymm4,%ymm6,%ymm4
- vmovdqa 48(%rax),%ymm10
- vmovdqa 112(%rax),%ymm5
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- vpmuludq %ymm2,%ymm7,%ymm13
- vpmuludq %ymm2,%ymm8,%ymm14
- vpmuludq %ymm2,%ymm9,%ymm15
- vpmuludq %ymm2,%ymm10,%ymm11
- vpmuludq %ymm2,%ymm5,%ymm12
-
- vpmuludq %ymm0,%ymm8,%ymm6
- vpmuludq %ymm1,%ymm8,%ymm2
- vpaddq %ymm6,%ymm12,%ymm12
- vpaddq %ymm2,%ymm13,%ymm13
- vpmuludq %ymm3,%ymm8,%ymm6
- vpmuludq 64(%rsp),%ymm4,%ymm2
- vpaddq %ymm6,%ymm15,%ymm15
- vpaddq %ymm2,%ymm11,%ymm11
- vmovdqa -16(%rax),%ymm8
-
- vpmuludq %ymm0,%ymm7,%ymm6
- vpmuludq %ymm1,%ymm7,%ymm2
- vpaddq %ymm6,%ymm11,%ymm11
- vpaddq %ymm2,%ymm12,%ymm12
- vpmuludq %ymm3,%ymm7,%ymm6
- vpmuludq %ymm4,%ymm7,%ymm2
- vmovdqu 0(%rsi),%xmm7
- vpaddq %ymm6,%ymm14,%ymm14
- vpaddq %ymm2,%ymm15,%ymm15
- vinserti128 $1,32(%rsi),%ymm7,%ymm7
-
- vpmuludq %ymm3,%ymm8,%ymm6
- vpmuludq %ymm4,%ymm8,%ymm2
- vmovdqu 16(%rsi),%xmm8
- vpaddq %ymm6,%ymm11,%ymm11
- vpaddq %ymm2,%ymm12,%ymm12
- vmovdqa 16(%rax),%ymm2
- vpmuludq %ymm1,%ymm9,%ymm6
- vpmuludq %ymm0,%ymm9,%ymm9
- vpaddq %ymm6,%ymm14,%ymm14
- vpaddq %ymm9,%ymm13,%ymm13
- vinserti128 $1,48(%rsi),%ymm8,%ymm8
- leaq 64(%rsi),%rsi
-
- vpmuludq %ymm1,%ymm2,%ymm6
- vpmuludq %ymm0,%ymm2,%ymm2
- vpsrldq $6,%ymm7,%ymm9
- vpaddq %ymm6,%ymm15,%ymm15
- vpaddq %ymm2,%ymm14,%ymm14
- vpmuludq %ymm3,%ymm10,%ymm6
- vpmuludq %ymm4,%ymm10,%ymm2
- vpsrldq $6,%ymm8,%ymm10
- vpaddq %ymm6,%ymm12,%ymm12
- vpaddq %ymm2,%ymm13,%ymm13
- vpunpckhqdq %ymm8,%ymm7,%ymm6
-
- vpmuludq %ymm3,%ymm5,%ymm3
- vpmuludq %ymm4,%ymm5,%ymm4
- vpunpcklqdq %ymm8,%ymm7,%ymm7
- vpaddq %ymm3,%ymm13,%ymm2
- vpaddq %ymm4,%ymm14,%ymm3
- vpunpcklqdq %ymm10,%ymm9,%ymm10
- vpmuludq 80(%rax),%ymm0,%ymm4
- vpmuludq %ymm1,%ymm5,%ymm0
- vmovdqa 64(%rcx),%ymm5
- vpaddq %ymm4,%ymm15,%ymm4
- vpaddq %ymm0,%ymm11,%ymm0
-
-
-
-
- vpsrlq $26,%ymm3,%ymm14
- vpand %ymm5,%ymm3,%ymm3
- vpaddq %ymm14,%ymm4,%ymm4
-
- vpsrlq $26,%ymm0,%ymm11
- vpand %ymm5,%ymm0,%ymm0
- vpaddq %ymm11,%ymm12,%ymm1
-
- vpsrlq $26,%ymm4,%ymm15
- vpand %ymm5,%ymm4,%ymm4
-
- vpsrlq $4,%ymm10,%ymm9
-
- vpsrlq $26,%ymm1,%ymm12
- vpand %ymm5,%ymm1,%ymm1
- vpaddq %ymm12,%ymm2,%ymm2
-
- vpaddq %ymm15,%ymm0,%ymm0
- vpsllq $2,%ymm15,%ymm15
- vpaddq %ymm15,%ymm0,%ymm0
-
- vpand %ymm5,%ymm9,%ymm9
- vpsrlq $26,%ymm7,%ymm8
-
- vpsrlq $26,%ymm2,%ymm13
- vpand %ymm5,%ymm2,%ymm2
- vpaddq %ymm13,%ymm3,%ymm3
-
- vpaddq %ymm9,%ymm2,%ymm2
- vpsrlq $30,%ymm10,%ymm10
-
- vpsrlq $26,%ymm0,%ymm11
- vpand %ymm5,%ymm0,%ymm0
- vpaddq %ymm11,%ymm1,%ymm1
-
- vpsrlq $40,%ymm6,%ymm6
-
- vpsrlq $26,%ymm3,%ymm14
- vpand %ymm5,%ymm3,%ymm3
- vpaddq %ymm14,%ymm4,%ymm4
-
- vpand %ymm5,%ymm7,%ymm7
- vpand %ymm5,%ymm8,%ymm8
- vpand %ymm5,%ymm10,%ymm10
- vpor 32(%rcx),%ymm6,%ymm6
-
- subq $64,%rdx
- jnz .Loop_avx2
-
-.byte 0x66,0x90
-.Ltail_avx2:
-
-
-
-
-
-
-
- vpaddq %ymm0,%ymm7,%ymm0
- vmovdqu 4(%rsp),%ymm7
- vpaddq %ymm1,%ymm8,%ymm1
- vmovdqu 36(%rsp),%ymm8
- vpaddq %ymm3,%ymm10,%ymm3
- vmovdqu 100(%rsp),%ymm9
- vpaddq %ymm4,%ymm6,%ymm4
- vmovdqu 52(%rax),%ymm10
- vmovdqu 116(%rax),%ymm5
-
- vpmuludq %ymm2,%ymm7,%ymm13
- vpmuludq %ymm2,%ymm8,%ymm14
- vpmuludq %ymm2,%ymm9,%ymm15
- vpmuludq %ymm2,%ymm10,%ymm11
- vpmuludq %ymm2,%ymm5,%ymm12
-
- vpmuludq %ymm0,%ymm8,%ymm6
- vpmuludq %ymm1,%ymm8,%ymm2
- vpaddq %ymm6,%ymm12,%ymm12
- vpaddq %ymm2,%ymm13,%ymm13
- vpmuludq %ymm3,%ymm8,%ymm6
- vpmuludq 68(%rsp),%ymm4,%ymm2
- vpaddq %ymm6,%ymm15,%ymm15
- vpaddq %ymm2,%ymm11,%ymm11
-
- vpmuludq %ymm0,%ymm7,%ymm6
- vpmuludq %ymm1,%ymm7,%ymm2
- vpaddq %ymm6,%ymm11,%ymm11
- vmovdqu -12(%rax),%ymm8
- vpaddq %ymm2,%ymm12,%ymm12
- vpmuludq %ymm3,%ymm7,%ymm6
- vpmuludq %ymm4,%ymm7,%ymm2
- vpaddq %ymm6,%ymm14,%ymm14
- vpaddq %ymm2,%ymm15,%ymm15
-
- vpmuludq %ymm3,%ymm8,%ymm6
- vpmuludq %ymm4,%ymm8,%ymm2
- vpaddq %ymm6,%ymm11,%ymm11
- vpaddq %ymm2,%ymm12,%ymm12
- vmovdqu 20(%rax),%ymm2
- vpmuludq %ymm1,%ymm9,%ymm6
- vpmuludq %ymm0,%ymm9,%ymm9
- vpaddq %ymm6,%ymm14,%ymm14
- vpaddq %ymm9,%ymm13,%ymm13
-
- vpmuludq %ymm1,%ymm2,%ymm6
- vpmuludq %ymm0,%ymm2,%ymm2
- vpaddq %ymm6,%ymm15,%ymm15
- vpaddq %ymm2,%ymm14,%ymm14
- vpmuludq %ymm3,%ymm10,%ymm6
- vpmuludq %ymm4,%ymm10,%ymm2
- vpaddq %ymm6,%ymm12,%ymm12
- vpaddq %ymm2,%ymm13,%ymm13
-
- vpmuludq %ymm3,%ymm5,%ymm3
- vpmuludq %ymm4,%ymm5,%ymm4
- vpaddq %ymm3,%ymm13,%ymm2
- vpaddq %ymm4,%ymm14,%ymm3
- vpmuludq 84(%rax),%ymm0,%ymm4
- vpmuludq %ymm1,%ymm5,%ymm0
- vmovdqa 64(%rcx),%ymm5
- vpaddq %ymm4,%ymm15,%ymm4
- vpaddq %ymm0,%ymm11,%ymm0
-
-
-
-
- vpsrldq $8,%ymm12,%ymm8
- vpsrldq $8,%ymm2,%ymm9
- vpsrldq $8,%ymm3,%ymm10
- vpsrldq $8,%ymm4,%ymm6
- vpsrldq $8,%ymm0,%ymm7
- vpaddq %ymm8,%ymm12,%ymm12
- vpaddq %ymm9,%ymm2,%ymm2
- vpaddq %ymm10,%ymm3,%ymm3
- vpaddq %ymm6,%ymm4,%ymm4
- vpaddq %ymm7,%ymm0,%ymm0
-
- vpermq $0x2,%ymm3,%ymm10
- vpermq $0x2,%ymm4,%ymm6
- vpermq $0x2,%ymm0,%ymm7
- vpermq $0x2,%ymm12,%ymm8
- vpermq $0x2,%ymm2,%ymm9
- vpaddq %ymm10,%ymm3,%ymm3
- vpaddq %ymm6,%ymm4,%ymm4
- vpaddq %ymm7,%ymm0,%ymm0
- vpaddq %ymm8,%ymm12,%ymm12
- vpaddq %ymm9,%ymm2,%ymm2
-
-
-
-
- vpsrlq $26,%ymm3,%ymm14
- vpand %ymm5,%ymm3,%ymm3
- vpaddq %ymm14,%ymm4,%ymm4
-
- vpsrlq $26,%ymm0,%ymm11
- vpand %ymm5,%ymm0,%ymm0
- vpaddq %ymm11,%ymm12,%ymm1
-
- vpsrlq $26,%ymm4,%ymm15
- vpand %ymm5,%ymm4,%ymm4
-
- vpsrlq $26,%ymm1,%ymm12
- vpand %ymm5,%ymm1,%ymm1
- vpaddq %ymm12,%ymm2,%ymm2
-
- vpaddq %ymm15,%ymm0,%ymm0
- vpsllq $2,%ymm15,%ymm15
- vpaddq %ymm15,%ymm0,%ymm0
-
- vpsrlq $26,%ymm2,%ymm13
- vpand %ymm5,%ymm2,%ymm2
- vpaddq %ymm13,%ymm3,%ymm3
-
- vpsrlq $26,%ymm0,%ymm11
- vpand %ymm5,%ymm0,%ymm0
- vpaddq %ymm11,%ymm1,%ymm1
-
- vpsrlq $26,%ymm3,%ymm14
- vpand %ymm5,%ymm3,%ymm3
- vpaddq %ymm14,%ymm4,%ymm4
-
- vmovd %xmm0,-112(%rdi)
- vmovd %xmm1,-108(%rdi)
- vmovd %xmm2,-104(%rdi)
- vmovd %xmm3,-100(%rdi)
- vmovd %xmm4,-96(%rdi)
- leaq 8(%r11),%rsp
-.cfi_def_cfa %rsp,8
- vzeroupper
- .byte 0xf3,0xc3
.cfi_endproc
-.size poly1305_blocks_avx2,.-poly1305_blocks_avx2
-.align 64
-.Lconst:
-.Lmask24:
-.long 0x0ffffff,0,0x0ffffff,0,0x0ffffff,0,0x0ffffff,0
-.L129:
-.long 16777216,0,16777216,0,16777216,0,16777216,0
-.Lmask26:
-.long 0x3ffffff,0,0x3ffffff,0,0x3ffffff,0,0x3ffffff,0
-.Lpermd_avx2:
-.long 2,2,2,3,2,0,2,1
-.Lpermd_avx512:
-.long 0,0,0,1, 0,2,0,3, 0,4,0,5, 0,6,0,7
-
-.L2_44_inp_permd:
-.long 0,1,1,2,2,3,7,7
-.L2_44_inp_shift:
-.quad 0,12,24,64
-.L2_44_mask:
-.quad 0xfffffffffff,0xfffffffffff,0x3ffffffffff,0xffffffffffffffff
-.L2_44_shift_rgt:
-.quad 44,44,42,64
-.L2_44_shift_lft:
-.quad 8,8,10,64
-
-.align 64
-.Lx_mask44:
-.quad 0xfffffffffff,0xfffffffffff,0xfffffffffff,0xfffffffffff
-.quad 0xfffffffffff,0xfffffffffff,0xfffffffffff,0xfffffffffff
-.Lx_mask42:
-.quad 0x3ffffffffff,0x3ffffffffff,0x3ffffffffff,0x3ffffffffff
-.quad 0x3ffffffffff,0x3ffffffffff,0x3ffffffffff,0x3ffffffffff
+.size poly1305_emit,.-poly1305_emit
.byte 80,111,108,121,49,51,48,53,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 16
.globl xor128_encrypt_n_pad
.type xor128_encrypt_n_pad,@function
.align 16
xor128_encrypt_n_pad:
+.cfi_startproc
subq %rdx,%rsi
subq %rdx,%rdi
movq %rcx,%r10
@@ -2002,12 +228,14 @@ xor128_encrypt_n_pad:
.Ldone_enc:
movq %rdx,%rax
.byte 0xf3,0xc3
+.cfi_endproc
.size xor128_encrypt_n_pad,.-xor128_encrypt_n_pad
.globl xor128_decrypt_n_pad
.type xor128_decrypt_n_pad,@function
.align 16
xor128_decrypt_n_pad:
+.cfi_startproc
subq %rdx,%rsi
subq %rdx,%rdi
movq %rcx,%r10
@@ -2053,4 +281,5 @@ xor128_decrypt_n_pad:
.Ldone_dec:
movq %rdx,%rax
.byte 0xf3,0xc3
+.cfi_endproc
.size xor128_decrypt_n_pad,.-xor128_decrypt_n_pad
diff --git a/secure/lib/libcrypto/amd64/rc4-x86_64.S b/secure/lib/libcrypto/amd64/rc4-x86_64.S
index b77714c170f3f..a084e9b9c993d 100644
--- a/secure/lib/libcrypto/amd64/rc4-x86_64.S
+++ b/secure/lib/libcrypto/amd64/rc4-x86_64.S
@@ -6,11 +6,12 @@
.globl RC4
.type RC4,@function
.align 16
-RC4: orq %rsi,%rsi
+RC4:
+.cfi_startproc
+ orq %rsi,%rsi
jne .Lentry
.byte 0xf3,0xc3
.Lentry:
-.cfi_startproc
pushq %rbx
.cfi_adjust_cfa_offset 8
.cfi_offset %rbx,-16
@@ -535,6 +536,7 @@ RC4: orq %rsi,%rsi
.type RC4_set_key,@function
.align 16
RC4_set_key:
+.cfi_startproc
leaq 8(%rdi),%rdi
leaq (%rdx,%rsi,1),%rdx
negq %rsi
@@ -601,12 +603,14 @@ RC4_set_key:
movl %eax,-8(%rdi)
movl %eax,-4(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size RC4_set_key,.-RC4_set_key
.globl RC4_options
.type RC4_options,@function
.align 16
RC4_options:
+.cfi_startproc
leaq .Lopts(%rip),%rax
movl OPENSSL_ia32cap_P(%rip),%edx
btl $20,%edx
@@ -619,6 +623,7 @@ RC4_options:
addq $12,%rax
.Ldone:
.byte 0xf3,0xc3
+.cfi_endproc
.align 64
.Lopts:
.byte 114,99,52,40,56,120,44,105,110,116,41,0
diff --git a/secure/lib/libcrypto/amd64/rsaz-avx2.S b/secure/lib/libcrypto/amd64/rsaz-avx2.S
index 3075a52d2eec5..e957915a7d81c 100644
--- a/secure/lib/libcrypto/amd64/rsaz-avx2.S
+++ b/secure/lib/libcrypto/amd64/rsaz-avx2.S
@@ -2,1745 +2,26 @@
/* Do not modify. This file is auto-generated from rsaz-avx2.pl. */
.text
+.globl rsaz_avx2_eligible
+.type rsaz_avx2_eligible,@function
+rsaz_avx2_eligible:
+ xorl %eax,%eax
+ .byte 0xf3,0xc3
+.size rsaz_avx2_eligible,.-rsaz_avx2_eligible
+
.globl rsaz_1024_sqr_avx2
+.globl rsaz_1024_mul_avx2
+.globl rsaz_1024_norm2red_avx2
+.globl rsaz_1024_red2norm_avx2
+.globl rsaz_1024_scatter5_avx2
+.globl rsaz_1024_gather5_avx2
.type rsaz_1024_sqr_avx2,@function
-.align 64
rsaz_1024_sqr_avx2:
-.cfi_startproc
- leaq (%rsp),%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- vzeroupper
- movq %rax,%rbp
-.cfi_def_cfa_register %rbp
- movq %rdx,%r13
- subq $832,%rsp
- movq %r13,%r15
- subq $-128,%rdi
- subq $-128,%rsi
- subq $-128,%r13
-
- andq $4095,%r15
- addq $320,%r15
- shrq $12,%r15
- vpxor %ymm9,%ymm9,%ymm9
- jz .Lsqr_1024_no_n_copy
-
-
-
-
-
- subq $320,%rsp
- vmovdqu 0-128(%r13),%ymm0
- andq $-2048,%rsp
- vmovdqu 32-128(%r13),%ymm1
- vmovdqu 64-128(%r13),%ymm2
- vmovdqu 96-128(%r13),%ymm3
- vmovdqu 128-128(%r13),%ymm4
- vmovdqu 160-128(%r13),%ymm5
- vmovdqu 192-128(%r13),%ymm6
- vmovdqu 224-128(%r13),%ymm7
- vmovdqu 256-128(%r13),%ymm8
- leaq 832+128(%rsp),%r13
- vmovdqu %ymm0,0-128(%r13)
- vmovdqu %ymm1,32-128(%r13)
- vmovdqu %ymm2,64-128(%r13)
- vmovdqu %ymm3,96-128(%r13)
- vmovdqu %ymm4,128-128(%r13)
- vmovdqu %ymm5,160-128(%r13)
- vmovdqu %ymm6,192-128(%r13)
- vmovdqu %ymm7,224-128(%r13)
- vmovdqu %ymm8,256-128(%r13)
- vmovdqu %ymm9,288-128(%r13)
-
-.Lsqr_1024_no_n_copy:
- andq $-1024,%rsp
-
- vmovdqu 32-128(%rsi),%ymm1
- vmovdqu 64-128(%rsi),%ymm2
- vmovdqu 96-128(%rsi),%ymm3
- vmovdqu 128-128(%rsi),%ymm4
- vmovdqu 160-128(%rsi),%ymm5
- vmovdqu 192-128(%rsi),%ymm6
- vmovdqu 224-128(%rsi),%ymm7
- vmovdqu 256-128(%rsi),%ymm8
-
- leaq 192(%rsp),%rbx
- vmovdqu .Land_mask(%rip),%ymm15
- jmp .LOOP_GRANDE_SQR_1024
-
-.align 32
-.LOOP_GRANDE_SQR_1024:
- leaq 576+128(%rsp),%r9
- leaq 448(%rsp),%r12
-
-
-
-
- vpaddq %ymm1,%ymm1,%ymm1
- vpbroadcastq 0-128(%rsi),%ymm10
- vpaddq %ymm2,%ymm2,%ymm2
- vmovdqa %ymm1,0-128(%r9)
- vpaddq %ymm3,%ymm3,%ymm3
- vmovdqa %ymm2,32-128(%r9)
- vpaddq %ymm4,%ymm4,%ymm4
- vmovdqa %ymm3,64-128(%r9)
- vpaddq %ymm5,%ymm5,%ymm5
- vmovdqa %ymm4,96-128(%r9)
- vpaddq %ymm6,%ymm6,%ymm6
- vmovdqa %ymm5,128-128(%r9)
- vpaddq %ymm7,%ymm7,%ymm7
- vmovdqa %ymm6,160-128(%r9)
- vpaddq %ymm8,%ymm8,%ymm8
- vmovdqa %ymm7,192-128(%r9)
- vpxor %ymm9,%ymm9,%ymm9
- vmovdqa %ymm8,224-128(%r9)
-
- vpmuludq 0-128(%rsi),%ymm10,%ymm0
- vpbroadcastq 32-128(%rsi),%ymm11
- vmovdqu %ymm9,288-192(%rbx)
- vpmuludq %ymm10,%ymm1,%ymm1
- vmovdqu %ymm9,320-448(%r12)
- vpmuludq %ymm10,%ymm2,%ymm2
- vmovdqu %ymm9,352-448(%r12)
- vpmuludq %ymm10,%ymm3,%ymm3
- vmovdqu %ymm9,384-448(%r12)
- vpmuludq %ymm10,%ymm4,%ymm4
- vmovdqu %ymm9,416-448(%r12)
- vpmuludq %ymm10,%ymm5,%ymm5
- vmovdqu %ymm9,448-448(%r12)
- vpmuludq %ymm10,%ymm6,%ymm6
- vmovdqu %ymm9,480-448(%r12)
- vpmuludq %ymm10,%ymm7,%ymm7
- vmovdqu %ymm9,512-448(%r12)
- vpmuludq %ymm10,%ymm8,%ymm8
- vpbroadcastq 64-128(%rsi),%ymm10
- vmovdqu %ymm9,544-448(%r12)
-
- movq %rsi,%r15
- movl $4,%r14d
- jmp .Lsqr_entry_1024
-.align 32
-.LOOP_SQR_1024:
- vpbroadcastq 32-128(%r15),%ymm11
- vpmuludq 0-128(%rsi),%ymm10,%ymm0
- vpaddq 0-192(%rbx),%ymm0,%ymm0
- vpmuludq 0-128(%r9),%ymm10,%ymm1
- vpaddq 32-192(%rbx),%ymm1,%ymm1
- vpmuludq 32-128(%r9),%ymm10,%ymm2
- vpaddq 64-192(%rbx),%ymm2,%ymm2
- vpmuludq 64-128(%r9),%ymm10,%ymm3
- vpaddq 96-192(%rbx),%ymm3,%ymm3
- vpmuludq 96-128(%r9),%ymm10,%ymm4
- vpaddq 128-192(%rbx),%ymm4,%ymm4
- vpmuludq 128-128(%r9),%ymm10,%ymm5
- vpaddq 160-192(%rbx),%ymm5,%ymm5
- vpmuludq 160-128(%r9),%ymm10,%ymm6
- vpaddq 192-192(%rbx),%ymm6,%ymm6
- vpmuludq 192-128(%r9),%ymm10,%ymm7
- vpaddq 224-192(%rbx),%ymm7,%ymm7
- vpmuludq 224-128(%r9),%ymm10,%ymm8
- vpbroadcastq 64-128(%r15),%ymm10
- vpaddq 256-192(%rbx),%ymm8,%ymm8
-.Lsqr_entry_1024:
- vmovdqu %ymm0,0-192(%rbx)
- vmovdqu %ymm1,32-192(%rbx)
-
- vpmuludq 32-128(%rsi),%ymm11,%ymm12
- vpaddq %ymm12,%ymm2,%ymm2
- vpmuludq 32-128(%r9),%ymm11,%ymm14
- vpaddq %ymm14,%ymm3,%ymm3
- vpmuludq 64-128(%r9),%ymm11,%ymm13
- vpaddq %ymm13,%ymm4,%ymm4
- vpmuludq 96-128(%r9),%ymm11,%ymm12
- vpaddq %ymm12,%ymm5,%ymm5
- vpmuludq 128-128(%r9),%ymm11,%ymm14
- vpaddq %ymm14,%ymm6,%ymm6
- vpmuludq 160-128(%r9),%ymm11,%ymm13
- vpaddq %ymm13,%ymm7,%ymm7
- vpmuludq 192-128(%r9),%ymm11,%ymm12
- vpaddq %ymm12,%ymm8,%ymm8
- vpmuludq 224-128(%r9),%ymm11,%ymm0
- vpbroadcastq 96-128(%r15),%ymm11
- vpaddq 288-192(%rbx),%ymm0,%ymm0
-
- vmovdqu %ymm2,64-192(%rbx)
- vmovdqu %ymm3,96-192(%rbx)
-
- vpmuludq 64-128(%rsi),%ymm10,%ymm13
- vpaddq %ymm13,%ymm4,%ymm4
- vpmuludq 64-128(%r9),%ymm10,%ymm12
- vpaddq %ymm12,%ymm5,%ymm5
- vpmuludq 96-128(%r9),%ymm10,%ymm14
- vpaddq %ymm14,%ymm6,%ymm6
- vpmuludq 128-128(%r9),%ymm10,%ymm13
- vpaddq %ymm13,%ymm7,%ymm7
- vpmuludq 160-128(%r9),%ymm10,%ymm12
- vpaddq %ymm12,%ymm8,%ymm8
- vpmuludq 192-128(%r9),%ymm10,%ymm14
- vpaddq %ymm14,%ymm0,%ymm0
- vpmuludq 224-128(%r9),%ymm10,%ymm1
- vpbroadcastq 128-128(%r15),%ymm10
- vpaddq 320-448(%r12),%ymm1,%ymm1
-
- vmovdqu %ymm4,128-192(%rbx)
- vmovdqu %ymm5,160-192(%rbx)
-
- vpmuludq 96-128(%rsi),%ymm11,%ymm12
- vpaddq %ymm12,%ymm6,%ymm6
- vpmuludq 96-128(%r9),%ymm11,%ymm14
- vpaddq %ymm14,%ymm7,%ymm7
- vpmuludq 128-128(%r9),%ymm11,%ymm13
- vpaddq %ymm13,%ymm8,%ymm8
- vpmuludq 160-128(%r9),%ymm11,%ymm12
- vpaddq %ymm12,%ymm0,%ymm0
- vpmuludq 192-128(%r9),%ymm11,%ymm14
- vpaddq %ymm14,%ymm1,%ymm1
- vpmuludq 224-128(%r9),%ymm11,%ymm2
- vpbroadcastq 160-128(%r15),%ymm11
- vpaddq 352-448(%r12),%ymm2,%ymm2
-
- vmovdqu %ymm6,192-192(%rbx)
- vmovdqu %ymm7,224-192(%rbx)
-
- vpmuludq 128-128(%rsi),%ymm10,%ymm12
- vpaddq %ymm12,%ymm8,%ymm8
- vpmuludq 128-128(%r9),%ymm10,%ymm14
- vpaddq %ymm14,%ymm0,%ymm0
- vpmuludq 160-128(%r9),%ymm10,%ymm13
- vpaddq %ymm13,%ymm1,%ymm1
- vpmuludq 192-128(%r9),%ymm10,%ymm12
- vpaddq %ymm12,%ymm2,%ymm2
- vpmuludq 224-128(%r9),%ymm10,%ymm3
- vpbroadcastq 192-128(%r15),%ymm10
- vpaddq 384-448(%r12),%ymm3,%ymm3
-
- vmovdqu %ymm8,256-192(%rbx)
- vmovdqu %ymm0,288-192(%rbx)
- leaq 8(%rbx),%rbx
-
- vpmuludq 160-128(%rsi),%ymm11,%ymm13
- vpaddq %ymm13,%ymm1,%ymm1
- vpmuludq 160-128(%r9),%ymm11,%ymm12
- vpaddq %ymm12,%ymm2,%ymm2
- vpmuludq 192-128(%r9),%ymm11,%ymm14
- vpaddq %ymm14,%ymm3,%ymm3
- vpmuludq 224-128(%r9),%ymm11,%ymm4
- vpbroadcastq 224-128(%r15),%ymm11
- vpaddq 416-448(%r12),%ymm4,%ymm4
-
- vmovdqu %ymm1,320-448(%r12)
- vmovdqu %ymm2,352-448(%r12)
-
- vpmuludq 192-128(%rsi),%ymm10,%ymm12
- vpaddq %ymm12,%ymm3,%ymm3
- vpmuludq 192-128(%r9),%ymm10,%ymm14
- vpbroadcastq 256-128(%r15),%ymm0
- vpaddq %ymm14,%ymm4,%ymm4
- vpmuludq 224-128(%r9),%ymm10,%ymm5
- vpbroadcastq 0+8-128(%r15),%ymm10
- vpaddq 448-448(%r12),%ymm5,%ymm5
-
- vmovdqu %ymm3,384-448(%r12)
- vmovdqu %ymm4,416-448(%r12)
- leaq 8(%r15),%r15
-
- vpmuludq 224-128(%rsi),%ymm11,%ymm12
- vpaddq %ymm12,%ymm5,%ymm5
- vpmuludq 224-128(%r9),%ymm11,%ymm6
- vpaddq 480-448(%r12),%ymm6,%ymm6
-
- vpmuludq 256-128(%rsi),%ymm0,%ymm7
- vmovdqu %ymm5,448-448(%r12)
- vpaddq 512-448(%r12),%ymm7,%ymm7
- vmovdqu %ymm6,480-448(%r12)
- vmovdqu %ymm7,512-448(%r12)
- leaq 8(%r12),%r12
-
- decl %r14d
- jnz .LOOP_SQR_1024
-
- vmovdqu 256(%rsp),%ymm8
- vmovdqu 288(%rsp),%ymm1
- vmovdqu 320(%rsp),%ymm2
- leaq 192(%rsp),%rbx
-
- vpsrlq $29,%ymm8,%ymm14
- vpand %ymm15,%ymm8,%ymm8
- vpsrlq $29,%ymm1,%ymm11
- vpand %ymm15,%ymm1,%ymm1
-
- vpermq $0x93,%ymm14,%ymm14
- vpxor %ymm9,%ymm9,%ymm9
- vpermq $0x93,%ymm11,%ymm11
-
- vpblendd $3,%ymm9,%ymm14,%ymm10
- vpblendd $3,%ymm14,%ymm11,%ymm14
- vpaddq %ymm10,%ymm8,%ymm8
- vpblendd $3,%ymm11,%ymm9,%ymm11
- vpaddq %ymm14,%ymm1,%ymm1
- vpaddq %ymm11,%ymm2,%ymm2
- vmovdqu %ymm1,288-192(%rbx)
- vmovdqu %ymm2,320-192(%rbx)
-
- movq (%rsp),%rax
- movq 8(%rsp),%r10
- movq 16(%rsp),%r11
- movq 24(%rsp),%r12
- vmovdqu 32(%rsp),%ymm1
- vmovdqu 64-192(%rbx),%ymm2
- vmovdqu 96-192(%rbx),%ymm3
- vmovdqu 128-192(%rbx),%ymm4
- vmovdqu 160-192(%rbx),%ymm5
- vmovdqu 192-192(%rbx),%ymm6
- vmovdqu 224-192(%rbx),%ymm7
-
- movq %rax,%r9
- imull %ecx,%eax
- andl $0x1fffffff,%eax
- vmovd %eax,%xmm12
-
- movq %rax,%rdx
- imulq -128(%r13),%rax
- vpbroadcastq %xmm12,%ymm12
- addq %rax,%r9
- movq %rdx,%rax
- imulq 8-128(%r13),%rax
- shrq $29,%r9
- addq %rax,%r10
- movq %rdx,%rax
- imulq 16-128(%r13),%rax
- addq %r9,%r10
- addq %rax,%r11
- imulq 24-128(%r13),%rdx
- addq %rdx,%r12
-
- movq %r10,%rax
- imull %ecx,%eax
- andl $0x1fffffff,%eax
-
- movl $9,%r14d
- jmp .LOOP_REDUCE_1024
-
-.align 32
-.LOOP_REDUCE_1024:
- vmovd %eax,%xmm13
- vpbroadcastq %xmm13,%ymm13
-
- vpmuludq 32-128(%r13),%ymm12,%ymm10
- movq %rax,%rdx
- imulq -128(%r13),%rax
- vpaddq %ymm10,%ymm1,%ymm1
- addq %rax,%r10
- vpmuludq 64-128(%r13),%ymm12,%ymm14
- movq %rdx,%rax
- imulq 8-128(%r13),%rax
- vpaddq %ymm14,%ymm2,%ymm2
- vpmuludq 96-128(%r13),%ymm12,%ymm11
-.byte 0x67
- addq %rax,%r11
-.byte 0x67
- movq %rdx,%rax
- imulq 16-128(%r13),%rax
- shrq $29,%r10
- vpaddq %ymm11,%ymm3,%ymm3
- vpmuludq 128-128(%r13),%ymm12,%ymm10
- addq %rax,%r12
- addq %r10,%r11
- vpaddq %ymm10,%ymm4,%ymm4
- vpmuludq 160-128(%r13),%ymm12,%ymm14
- movq %r11,%rax
- imull %ecx,%eax
- vpaddq %ymm14,%ymm5,%ymm5
- vpmuludq 192-128(%r13),%ymm12,%ymm11
- andl $0x1fffffff,%eax
- vpaddq %ymm11,%ymm6,%ymm6
- vpmuludq 224-128(%r13),%ymm12,%ymm10
- vpaddq %ymm10,%ymm7,%ymm7
- vpmuludq 256-128(%r13),%ymm12,%ymm14
- vmovd %eax,%xmm12
-
- vpaddq %ymm14,%ymm8,%ymm8
-
- vpbroadcastq %xmm12,%ymm12
-
- vpmuludq 32-8-128(%r13),%ymm13,%ymm11
- vmovdqu 96-8-128(%r13),%ymm14
- movq %rax,%rdx
- imulq -128(%r13),%rax
- vpaddq %ymm11,%ymm1,%ymm1
- vpmuludq 64-8-128(%r13),%ymm13,%ymm10
- vmovdqu 128-8-128(%r13),%ymm11
- addq %rax,%r11
- movq %rdx,%rax
- imulq 8-128(%r13),%rax
- vpaddq %ymm10,%ymm2,%ymm2
- addq %r12,%rax
- shrq $29,%r11
- vpmuludq %ymm13,%ymm14,%ymm14
- vmovdqu 160-8-128(%r13),%ymm10
- addq %r11,%rax
- vpaddq %ymm14,%ymm3,%ymm3
- vpmuludq %ymm13,%ymm11,%ymm11
- vmovdqu 192-8-128(%r13),%ymm14
-.byte 0x67
- movq %rax,%r12
- imull %ecx,%eax
- vpaddq %ymm11,%ymm4,%ymm4
- vpmuludq %ymm13,%ymm10,%ymm10
-.byte 0xc4,0x41,0x7e,0x6f,0x9d,0x58,0x00,0x00,0x00
- andl $0x1fffffff,%eax
- vpaddq %ymm10,%ymm5,%ymm5
- vpmuludq %ymm13,%ymm14,%ymm14
- vmovdqu 256-8-128(%r13),%ymm10
- vpaddq %ymm14,%ymm6,%ymm6
- vpmuludq %ymm13,%ymm11,%ymm11
- vmovdqu 288-8-128(%r13),%ymm9
- vmovd %eax,%xmm0
- imulq -128(%r13),%rax
- vpaddq %ymm11,%ymm7,%ymm7
- vpmuludq %ymm13,%ymm10,%ymm10
- vmovdqu 32-16-128(%r13),%ymm14
- vpbroadcastq %xmm0,%ymm0
- vpaddq %ymm10,%ymm8,%ymm8
- vpmuludq %ymm13,%ymm9,%ymm9
- vmovdqu 64-16-128(%r13),%ymm11
- addq %rax,%r12
-
- vmovdqu 32-24-128(%r13),%ymm13
- vpmuludq %ymm12,%ymm14,%ymm14
- vmovdqu 96-16-128(%r13),%ymm10
- vpaddq %ymm14,%ymm1,%ymm1
- vpmuludq %ymm0,%ymm13,%ymm13
- vpmuludq %ymm12,%ymm11,%ymm11
-.byte 0xc4,0x41,0x7e,0x6f,0xb5,0xf0,0xff,0xff,0xff
- vpaddq %ymm1,%ymm13,%ymm13
- vpaddq %ymm11,%ymm2,%ymm2
- vpmuludq %ymm12,%ymm10,%ymm10
- vmovdqu 160-16-128(%r13),%ymm11
-.byte 0x67
- vmovq %xmm13,%rax
- vmovdqu %ymm13,(%rsp)
- vpaddq %ymm10,%ymm3,%ymm3
- vpmuludq %ymm12,%ymm14,%ymm14
- vmovdqu 192-16-128(%r13),%ymm10
- vpaddq %ymm14,%ymm4,%ymm4
- vpmuludq %ymm12,%ymm11,%ymm11
- vmovdqu 224-16-128(%r13),%ymm14
- vpaddq %ymm11,%ymm5,%ymm5
- vpmuludq %ymm12,%ymm10,%ymm10
- vmovdqu 256-16-128(%r13),%ymm11
- vpaddq %ymm10,%ymm6,%ymm6
- vpmuludq %ymm12,%ymm14,%ymm14
- shrq $29,%r12
- vmovdqu 288-16-128(%r13),%ymm10
- addq %r12,%rax
- vpaddq %ymm14,%ymm7,%ymm7
- vpmuludq %ymm12,%ymm11,%ymm11
-
- movq %rax,%r9
- imull %ecx,%eax
- vpaddq %ymm11,%ymm8,%ymm8
- vpmuludq %ymm12,%ymm10,%ymm10
- andl $0x1fffffff,%eax
- vmovd %eax,%xmm12
- vmovdqu 96-24-128(%r13),%ymm11
-.byte 0x67
- vpaddq %ymm10,%ymm9,%ymm9
- vpbroadcastq %xmm12,%ymm12
-
- vpmuludq 64-24-128(%r13),%ymm0,%ymm14
- vmovdqu 128-24-128(%r13),%ymm10
- movq %rax,%rdx
- imulq -128(%r13),%rax
- movq 8(%rsp),%r10
- vpaddq %ymm14,%ymm2,%ymm1
- vpmuludq %ymm0,%ymm11,%ymm11
- vmovdqu 160-24-128(%r13),%ymm14
- addq %rax,%r9
- movq %rdx,%rax
- imulq 8-128(%r13),%rax
-.byte 0x67
- shrq $29,%r9
- movq 16(%rsp),%r11
- vpaddq %ymm11,%ymm3,%ymm2
- vpmuludq %ymm0,%ymm10,%ymm10
- vmovdqu 192-24-128(%r13),%ymm11
- addq %rax,%r10
- movq %rdx,%rax
- imulq 16-128(%r13),%rax
- vpaddq %ymm10,%ymm4,%ymm3
- vpmuludq %ymm0,%ymm14,%ymm14
- vmovdqu 224-24-128(%r13),%ymm10
- imulq 24-128(%r13),%rdx
- addq %rax,%r11
- leaq (%r9,%r10,1),%rax
- vpaddq %ymm14,%ymm5,%ymm4
- vpmuludq %ymm0,%ymm11,%ymm11
- vmovdqu 256-24-128(%r13),%ymm14
- movq %rax,%r10
- imull %ecx,%eax
- vpmuludq %ymm0,%ymm10,%ymm10
- vpaddq %ymm11,%ymm6,%ymm5
- vmovdqu 288-24-128(%r13),%ymm11
- andl $0x1fffffff,%eax
- vpaddq %ymm10,%ymm7,%ymm6
- vpmuludq %ymm0,%ymm14,%ymm14
- addq 24(%rsp),%rdx
- vpaddq %ymm14,%ymm8,%ymm7
- vpmuludq %ymm0,%ymm11,%ymm11
- vpaddq %ymm11,%ymm9,%ymm8
- vmovq %r12,%xmm9
- movq %rdx,%r12
-
- decl %r14d
- jnz .LOOP_REDUCE_1024
- leaq 448(%rsp),%r12
- vpaddq %ymm9,%ymm13,%ymm0
- vpxor %ymm9,%ymm9,%ymm9
-
- vpaddq 288-192(%rbx),%ymm0,%ymm0
- vpaddq 320-448(%r12),%ymm1,%ymm1
- vpaddq 352-448(%r12),%ymm2,%ymm2
- vpaddq 384-448(%r12),%ymm3,%ymm3
- vpaddq 416-448(%r12),%ymm4,%ymm4
- vpaddq 448-448(%r12),%ymm5,%ymm5
- vpaddq 480-448(%r12),%ymm6,%ymm6
- vpaddq 512-448(%r12),%ymm7,%ymm7
- vpaddq 544-448(%r12),%ymm8,%ymm8
-
- vpsrlq $29,%ymm0,%ymm14
- vpand %ymm15,%ymm0,%ymm0
- vpsrlq $29,%ymm1,%ymm11
- vpand %ymm15,%ymm1,%ymm1
- vpsrlq $29,%ymm2,%ymm12
- vpermq $0x93,%ymm14,%ymm14
- vpand %ymm15,%ymm2,%ymm2
- vpsrlq $29,%ymm3,%ymm13
- vpermq $0x93,%ymm11,%ymm11
- vpand %ymm15,%ymm3,%ymm3
- vpermq $0x93,%ymm12,%ymm12
-
- vpblendd $3,%ymm9,%ymm14,%ymm10
- vpermq $0x93,%ymm13,%ymm13
- vpblendd $3,%ymm14,%ymm11,%ymm14
- vpaddq %ymm10,%ymm0,%ymm0
- vpblendd $3,%ymm11,%ymm12,%ymm11
- vpaddq %ymm14,%ymm1,%ymm1
- vpblendd $3,%ymm12,%ymm13,%ymm12
- vpaddq %ymm11,%ymm2,%ymm2
- vpblendd $3,%ymm13,%ymm9,%ymm13
- vpaddq %ymm12,%ymm3,%ymm3
- vpaddq %ymm13,%ymm4,%ymm4
-
- vpsrlq $29,%ymm0,%ymm14
- vpand %ymm15,%ymm0,%ymm0
- vpsrlq $29,%ymm1,%ymm11
- vpand %ymm15,%ymm1,%ymm1
- vpsrlq $29,%ymm2,%ymm12
- vpermq $0x93,%ymm14,%ymm14
- vpand %ymm15,%ymm2,%ymm2
- vpsrlq $29,%ymm3,%ymm13
- vpermq $0x93,%ymm11,%ymm11
- vpand %ymm15,%ymm3,%ymm3
- vpermq $0x93,%ymm12,%ymm12
-
- vpblendd $3,%ymm9,%ymm14,%ymm10
- vpermq $0x93,%ymm13,%ymm13
- vpblendd $3,%ymm14,%ymm11,%ymm14
- vpaddq %ymm10,%ymm0,%ymm0
- vpblendd $3,%ymm11,%ymm12,%ymm11
- vpaddq %ymm14,%ymm1,%ymm1
- vmovdqu %ymm0,0-128(%rdi)
- vpblendd $3,%ymm12,%ymm13,%ymm12
- vpaddq %ymm11,%ymm2,%ymm2
- vmovdqu %ymm1,32-128(%rdi)
- vpblendd $3,%ymm13,%ymm9,%ymm13
- vpaddq %ymm12,%ymm3,%ymm3
- vmovdqu %ymm2,64-128(%rdi)
- vpaddq %ymm13,%ymm4,%ymm4
- vmovdqu %ymm3,96-128(%rdi)
- vpsrlq $29,%ymm4,%ymm14
- vpand %ymm15,%ymm4,%ymm4
- vpsrlq $29,%ymm5,%ymm11
- vpand %ymm15,%ymm5,%ymm5
- vpsrlq $29,%ymm6,%ymm12
- vpermq $0x93,%ymm14,%ymm14
- vpand %ymm15,%ymm6,%ymm6
- vpsrlq $29,%ymm7,%ymm13
- vpermq $0x93,%ymm11,%ymm11
- vpand %ymm15,%ymm7,%ymm7
- vpsrlq $29,%ymm8,%ymm0
- vpermq $0x93,%ymm12,%ymm12
- vpand %ymm15,%ymm8,%ymm8
- vpermq $0x93,%ymm13,%ymm13
-
- vpblendd $3,%ymm9,%ymm14,%ymm10
- vpermq $0x93,%ymm0,%ymm0
- vpblendd $3,%ymm14,%ymm11,%ymm14
- vpaddq %ymm10,%ymm4,%ymm4
- vpblendd $3,%ymm11,%ymm12,%ymm11
- vpaddq %ymm14,%ymm5,%ymm5
- vpblendd $3,%ymm12,%ymm13,%ymm12
- vpaddq %ymm11,%ymm6,%ymm6
- vpblendd $3,%ymm13,%ymm0,%ymm13
- vpaddq %ymm12,%ymm7,%ymm7
- vpaddq %ymm13,%ymm8,%ymm8
-
- vpsrlq $29,%ymm4,%ymm14
- vpand %ymm15,%ymm4,%ymm4
- vpsrlq $29,%ymm5,%ymm11
- vpand %ymm15,%ymm5,%ymm5
- vpsrlq $29,%ymm6,%ymm12
- vpermq $0x93,%ymm14,%ymm14
- vpand %ymm15,%ymm6,%ymm6
- vpsrlq $29,%ymm7,%ymm13
- vpermq $0x93,%ymm11,%ymm11
- vpand %ymm15,%ymm7,%ymm7
- vpsrlq $29,%ymm8,%ymm0
- vpermq $0x93,%ymm12,%ymm12
- vpand %ymm15,%ymm8,%ymm8
- vpermq $0x93,%ymm13,%ymm13
-
- vpblendd $3,%ymm9,%ymm14,%ymm10
- vpermq $0x93,%ymm0,%ymm0
- vpblendd $3,%ymm14,%ymm11,%ymm14
- vpaddq %ymm10,%ymm4,%ymm4
- vpblendd $3,%ymm11,%ymm12,%ymm11
- vpaddq %ymm14,%ymm5,%ymm5
- vmovdqu %ymm4,128-128(%rdi)
- vpblendd $3,%ymm12,%ymm13,%ymm12
- vpaddq %ymm11,%ymm6,%ymm6
- vmovdqu %ymm5,160-128(%rdi)
- vpblendd $3,%ymm13,%ymm0,%ymm13
- vpaddq %ymm12,%ymm7,%ymm7
- vmovdqu %ymm6,192-128(%rdi)
- vpaddq %ymm13,%ymm8,%ymm8
- vmovdqu %ymm7,224-128(%rdi)
- vmovdqu %ymm8,256-128(%rdi)
-
- movq %rdi,%rsi
- decl %r8d
- jne .LOOP_GRANDE_SQR_1024
-
- vzeroall
- movq %rbp,%rax
-.cfi_def_cfa_register %rax
- movq -48(%rax),%r15
-.cfi_restore %r15
- movq -40(%rax),%r14
-.cfi_restore %r14
- movq -32(%rax),%r13
-.cfi_restore %r13
- movq -24(%rax),%r12
-.cfi_restore %r12
- movq -16(%rax),%rbp
-.cfi_restore %rbp
- movq -8(%rax),%rbx
-.cfi_restore %rbx
- leaq (%rax),%rsp
-.cfi_def_cfa_register %rsp
-.Lsqr_1024_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size rsaz_1024_sqr_avx2,.-rsaz_1024_sqr_avx2
-.globl rsaz_1024_mul_avx2
-.type rsaz_1024_mul_avx2,@function
-.align 64
rsaz_1024_mul_avx2:
-.cfi_startproc
- leaq (%rsp),%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- movq %rax,%rbp
-.cfi_def_cfa_register %rbp
- vzeroall
- movq %rdx,%r13
- subq $64,%rsp
-
-
-
-
-
-
-.byte 0x67,0x67
- movq %rsi,%r15
- andq $4095,%r15
- addq $320,%r15
- shrq $12,%r15
- movq %rsi,%r15
- cmovnzq %r13,%rsi
- cmovnzq %r15,%r13
-
- movq %rcx,%r15
- subq $-128,%rsi
- subq $-128,%rcx
- subq $-128,%rdi
-
- andq $4095,%r15
- addq $320,%r15
-.byte 0x67,0x67
- shrq $12,%r15
- jz .Lmul_1024_no_n_copy
-
-
-
-
-
- subq $320,%rsp
- vmovdqu 0-128(%rcx),%ymm0
- andq $-512,%rsp
- vmovdqu 32-128(%rcx),%ymm1
- vmovdqu 64-128(%rcx),%ymm2
- vmovdqu 96-128(%rcx),%ymm3
- vmovdqu 128-128(%rcx),%ymm4
- vmovdqu 160-128(%rcx),%ymm5
- vmovdqu 192-128(%rcx),%ymm6
- vmovdqu 224-128(%rcx),%ymm7
- vmovdqu 256-128(%rcx),%ymm8
- leaq 64+128(%rsp),%rcx
- vmovdqu %ymm0,0-128(%rcx)
- vpxor %ymm0,%ymm0,%ymm0
- vmovdqu %ymm1,32-128(%rcx)
- vpxor %ymm1,%ymm1,%ymm1
- vmovdqu %ymm2,64-128(%rcx)
- vpxor %ymm2,%ymm2,%ymm2
- vmovdqu %ymm3,96-128(%rcx)
- vpxor %ymm3,%ymm3,%ymm3
- vmovdqu %ymm4,128-128(%rcx)
- vpxor %ymm4,%ymm4,%ymm4
- vmovdqu %ymm5,160-128(%rcx)
- vpxor %ymm5,%ymm5,%ymm5
- vmovdqu %ymm6,192-128(%rcx)
- vpxor %ymm6,%ymm6,%ymm6
- vmovdqu %ymm7,224-128(%rcx)
- vpxor %ymm7,%ymm7,%ymm7
- vmovdqu %ymm8,256-128(%rcx)
- vmovdqa %ymm0,%ymm8
- vmovdqu %ymm9,288-128(%rcx)
-.Lmul_1024_no_n_copy:
- andq $-64,%rsp
-
- movq (%r13),%rbx
- vpbroadcastq (%r13),%ymm10
- vmovdqu %ymm0,(%rsp)
- xorq %r9,%r9
-.byte 0x67
- xorq %r10,%r10
- xorq %r11,%r11
- xorq %r12,%r12
-
- vmovdqu .Land_mask(%rip),%ymm15
- movl $9,%r14d
- vmovdqu %ymm9,288-128(%rdi)
- jmp .Loop_mul_1024
-
-.align 32
-.Loop_mul_1024:
- vpsrlq $29,%ymm3,%ymm9
- movq %rbx,%rax
- imulq -128(%rsi),%rax
- addq %r9,%rax
- movq %rbx,%r10
- imulq 8-128(%rsi),%r10
- addq 8(%rsp),%r10
-
- movq %rax,%r9
- imull %r8d,%eax
- andl $0x1fffffff,%eax
-
- movq %rbx,%r11
- imulq 16-128(%rsi),%r11
- addq 16(%rsp),%r11
-
- movq %rbx,%r12
- imulq 24-128(%rsi),%r12
- addq 24(%rsp),%r12
- vpmuludq 32-128(%rsi),%ymm10,%ymm0
- vmovd %eax,%xmm11
- vpaddq %ymm0,%ymm1,%ymm1
- vpmuludq 64-128(%rsi),%ymm10,%ymm12
- vpbroadcastq %xmm11,%ymm11
- vpaddq %ymm12,%ymm2,%ymm2
- vpmuludq 96-128(%rsi),%ymm10,%ymm13
- vpand %ymm15,%ymm3,%ymm3
- vpaddq %ymm13,%ymm3,%ymm3
- vpmuludq 128-128(%rsi),%ymm10,%ymm0
- vpaddq %ymm0,%ymm4,%ymm4
- vpmuludq 160-128(%rsi),%ymm10,%ymm12
- vpaddq %ymm12,%ymm5,%ymm5
- vpmuludq 192-128(%rsi),%ymm10,%ymm13
- vpaddq %ymm13,%ymm6,%ymm6
- vpmuludq 224-128(%rsi),%ymm10,%ymm0
- vpermq $0x93,%ymm9,%ymm9
- vpaddq %ymm0,%ymm7,%ymm7
- vpmuludq 256-128(%rsi),%ymm10,%ymm12
- vpbroadcastq 8(%r13),%ymm10
- vpaddq %ymm12,%ymm8,%ymm8
-
- movq %rax,%rdx
- imulq -128(%rcx),%rax
- addq %rax,%r9
- movq %rdx,%rax
- imulq 8-128(%rcx),%rax
- addq %rax,%r10
- movq %rdx,%rax
- imulq 16-128(%rcx),%rax
- addq %rax,%r11
- shrq $29,%r9
- imulq 24-128(%rcx),%rdx
- addq %rdx,%r12
- addq %r9,%r10
-
- vpmuludq 32-128(%rcx),%ymm11,%ymm13
- vmovq %xmm10,%rbx
- vpaddq %ymm13,%ymm1,%ymm1
- vpmuludq 64-128(%rcx),%ymm11,%ymm0
- vpaddq %ymm0,%ymm2,%ymm2
- vpmuludq 96-128(%rcx),%ymm11,%ymm12
- vpaddq %ymm12,%ymm3,%ymm3
- vpmuludq 128-128(%rcx),%ymm11,%ymm13
- vpaddq %ymm13,%ymm4,%ymm4
- vpmuludq 160-128(%rcx),%ymm11,%ymm0
- vpaddq %ymm0,%ymm5,%ymm5
- vpmuludq 192-128(%rcx),%ymm11,%ymm12
- vpaddq %ymm12,%ymm6,%ymm6
- vpmuludq 224-128(%rcx),%ymm11,%ymm13
- vpblendd $3,%ymm14,%ymm9,%ymm12
- vpaddq %ymm13,%ymm7,%ymm7
- vpmuludq 256-128(%rcx),%ymm11,%ymm0
- vpaddq %ymm12,%ymm3,%ymm3
- vpaddq %ymm0,%ymm8,%ymm8
-
- movq %rbx,%rax
- imulq -128(%rsi),%rax
- addq %rax,%r10
- vmovdqu -8+32-128(%rsi),%ymm12
- movq %rbx,%rax
- imulq 8-128(%rsi),%rax
- addq %rax,%r11
- vmovdqu -8+64-128(%rsi),%ymm13
-
- movq %r10,%rax
- vpblendd $0xfc,%ymm14,%ymm9,%ymm9
- imull %r8d,%eax
- vpaddq %ymm9,%ymm4,%ymm4
- andl $0x1fffffff,%eax
-
- imulq 16-128(%rsi),%rbx
- addq %rbx,%r12
- vpmuludq %ymm10,%ymm12,%ymm12
- vmovd %eax,%xmm11
- vmovdqu -8+96-128(%rsi),%ymm0
- vpaddq %ymm12,%ymm1,%ymm1
- vpmuludq %ymm10,%ymm13,%ymm13
- vpbroadcastq %xmm11,%ymm11
- vmovdqu -8+128-128(%rsi),%ymm12
- vpaddq %ymm13,%ymm2,%ymm2
- vpmuludq %ymm10,%ymm0,%ymm0
- vmovdqu -8+160-128(%rsi),%ymm13
- vpaddq %ymm0,%ymm3,%ymm3
- vpmuludq %ymm10,%ymm12,%ymm12
- vmovdqu -8+192-128(%rsi),%ymm0
- vpaddq %ymm12,%ymm4,%ymm4
- vpmuludq %ymm10,%ymm13,%ymm13
- vmovdqu -8+224-128(%rsi),%ymm12
- vpaddq %ymm13,%ymm5,%ymm5
- vpmuludq %ymm10,%ymm0,%ymm0
- vmovdqu -8+256-128(%rsi),%ymm13
- vpaddq %ymm0,%ymm6,%ymm6
- vpmuludq %ymm10,%ymm12,%ymm12
- vmovdqu -8+288-128(%rsi),%ymm9
- vpaddq %ymm12,%ymm7,%ymm7
- vpmuludq %ymm10,%ymm13,%ymm13
- vpaddq %ymm13,%ymm8,%ymm8
- vpmuludq %ymm10,%ymm9,%ymm9
- vpbroadcastq 16(%r13),%ymm10
-
- movq %rax,%rdx
- imulq -128(%rcx),%rax
- addq %rax,%r10
- vmovdqu -8+32-128(%rcx),%ymm0
- movq %rdx,%rax
- imulq 8-128(%rcx),%rax
- addq %rax,%r11
- vmovdqu -8+64-128(%rcx),%ymm12
- shrq $29,%r10
- imulq 16-128(%rcx),%rdx
- addq %rdx,%r12
- addq %r10,%r11
-
- vpmuludq %ymm11,%ymm0,%ymm0
- vmovq %xmm10,%rbx
- vmovdqu -8+96-128(%rcx),%ymm13
- vpaddq %ymm0,%ymm1,%ymm1
- vpmuludq %ymm11,%ymm12,%ymm12
- vmovdqu -8+128-128(%rcx),%ymm0
- vpaddq %ymm12,%ymm2,%ymm2
- vpmuludq %ymm11,%ymm13,%ymm13
- vmovdqu -8+160-128(%rcx),%ymm12
- vpaddq %ymm13,%ymm3,%ymm3
- vpmuludq %ymm11,%ymm0,%ymm0
- vmovdqu -8+192-128(%rcx),%ymm13
- vpaddq %ymm0,%ymm4,%ymm4
- vpmuludq %ymm11,%ymm12,%ymm12
- vmovdqu -8+224-128(%rcx),%ymm0
- vpaddq %ymm12,%ymm5,%ymm5
- vpmuludq %ymm11,%ymm13,%ymm13
- vmovdqu -8+256-128(%rcx),%ymm12
- vpaddq %ymm13,%ymm6,%ymm6
- vpmuludq %ymm11,%ymm0,%ymm0
- vmovdqu -8+288-128(%rcx),%ymm13
- vpaddq %ymm0,%ymm7,%ymm7
- vpmuludq %ymm11,%ymm12,%ymm12
- vpaddq %ymm12,%ymm8,%ymm8
- vpmuludq %ymm11,%ymm13,%ymm13
- vpaddq %ymm13,%ymm9,%ymm9
-
- vmovdqu -16+32-128(%rsi),%ymm0
- movq %rbx,%rax
- imulq -128(%rsi),%rax
- addq %r11,%rax
-
- vmovdqu -16+64-128(%rsi),%ymm12
- movq %rax,%r11
- imull %r8d,%eax
- andl $0x1fffffff,%eax
-
- imulq 8-128(%rsi),%rbx
- addq %rbx,%r12
- vpmuludq %ymm10,%ymm0,%ymm0
- vmovd %eax,%xmm11
- vmovdqu -16+96-128(%rsi),%ymm13
- vpaddq %ymm0,%ymm1,%ymm1
- vpmuludq %ymm10,%ymm12,%ymm12
- vpbroadcastq %xmm11,%ymm11
- vmovdqu -16+128-128(%rsi),%ymm0
- vpaddq %ymm12,%ymm2,%ymm2
- vpmuludq %ymm10,%ymm13,%ymm13
- vmovdqu -16+160-128(%rsi),%ymm12
- vpaddq %ymm13,%ymm3,%ymm3
- vpmuludq %ymm10,%ymm0,%ymm0
- vmovdqu -16+192-128(%rsi),%ymm13
- vpaddq %ymm0,%ymm4,%ymm4
- vpmuludq %ymm10,%ymm12,%ymm12
- vmovdqu -16+224-128(%rsi),%ymm0
- vpaddq %ymm12,%ymm5,%ymm5
- vpmuludq %ymm10,%ymm13,%ymm13
- vmovdqu -16+256-128(%rsi),%ymm12
- vpaddq %ymm13,%ymm6,%ymm6
- vpmuludq %ymm10,%ymm0,%ymm0
- vmovdqu -16+288-128(%rsi),%ymm13
- vpaddq %ymm0,%ymm7,%ymm7
- vpmuludq %ymm10,%ymm12,%ymm12
- vpaddq %ymm12,%ymm8,%ymm8
- vpmuludq %ymm10,%ymm13,%ymm13
- vpbroadcastq 24(%r13),%ymm10
- vpaddq %ymm13,%ymm9,%ymm9
-
- vmovdqu -16+32-128(%rcx),%ymm0
- movq %rax,%rdx
- imulq -128(%rcx),%rax
- addq %rax,%r11
- vmovdqu -16+64-128(%rcx),%ymm12
- imulq 8-128(%rcx),%rdx
- addq %rdx,%r12
- shrq $29,%r11
-
- vpmuludq %ymm11,%ymm0,%ymm0
- vmovq %xmm10,%rbx
- vmovdqu -16+96-128(%rcx),%ymm13
- vpaddq %ymm0,%ymm1,%ymm1
- vpmuludq %ymm11,%ymm12,%ymm12
- vmovdqu -16+128-128(%rcx),%ymm0
- vpaddq %ymm12,%ymm2,%ymm2
- vpmuludq %ymm11,%ymm13,%ymm13
- vmovdqu -16+160-128(%rcx),%ymm12
- vpaddq %ymm13,%ymm3,%ymm3
- vpmuludq %ymm11,%ymm0,%ymm0
- vmovdqu -16+192-128(%rcx),%ymm13
- vpaddq %ymm0,%ymm4,%ymm4
- vpmuludq %ymm11,%ymm12,%ymm12
- vmovdqu -16+224-128(%rcx),%ymm0
- vpaddq %ymm12,%ymm5,%ymm5
- vpmuludq %ymm11,%ymm13,%ymm13
- vmovdqu -16+256-128(%rcx),%ymm12
- vpaddq %ymm13,%ymm6,%ymm6
- vpmuludq %ymm11,%ymm0,%ymm0
- vmovdqu -16+288-128(%rcx),%ymm13
- vpaddq %ymm0,%ymm7,%ymm7
- vpmuludq %ymm11,%ymm12,%ymm12
- vmovdqu -24+32-128(%rsi),%ymm0
- vpaddq %ymm12,%ymm8,%ymm8
- vpmuludq %ymm11,%ymm13,%ymm13
- vmovdqu -24+64-128(%rsi),%ymm12
- vpaddq %ymm13,%ymm9,%ymm9
-
- addq %r11,%r12
- imulq -128(%rsi),%rbx
- addq %rbx,%r12
-
- movq %r12,%rax
- imull %r8d,%eax
- andl $0x1fffffff,%eax
-
- vpmuludq %ymm10,%ymm0,%ymm0
- vmovd %eax,%xmm11
- vmovdqu -24+96-128(%rsi),%ymm13
- vpaddq %ymm0,%ymm1,%ymm1
- vpmuludq %ymm10,%ymm12,%ymm12
- vpbroadcastq %xmm11,%ymm11
- vmovdqu -24+128-128(%rsi),%ymm0
- vpaddq %ymm12,%ymm2,%ymm2
- vpmuludq %ymm10,%ymm13,%ymm13
- vmovdqu -24+160-128(%rsi),%ymm12
- vpaddq %ymm13,%ymm3,%ymm3
- vpmuludq %ymm10,%ymm0,%ymm0
- vmovdqu -24+192-128(%rsi),%ymm13
- vpaddq %ymm0,%ymm4,%ymm4
- vpmuludq %ymm10,%ymm12,%ymm12
- vmovdqu -24+224-128(%rsi),%ymm0
- vpaddq %ymm12,%ymm5,%ymm5
- vpmuludq %ymm10,%ymm13,%ymm13
- vmovdqu -24+256-128(%rsi),%ymm12
- vpaddq %ymm13,%ymm6,%ymm6
- vpmuludq %ymm10,%ymm0,%ymm0
- vmovdqu -24+288-128(%rsi),%ymm13
- vpaddq %ymm0,%ymm7,%ymm7
- vpmuludq %ymm10,%ymm12,%ymm12
- vpaddq %ymm12,%ymm8,%ymm8
- vpmuludq %ymm10,%ymm13,%ymm13
- vpbroadcastq 32(%r13),%ymm10
- vpaddq %ymm13,%ymm9,%ymm9
- addq $32,%r13
-
- vmovdqu -24+32-128(%rcx),%ymm0
- imulq -128(%rcx),%rax
- addq %rax,%r12
- shrq $29,%r12
-
- vmovdqu -24+64-128(%rcx),%ymm12
- vpmuludq %ymm11,%ymm0,%ymm0
- vmovq %xmm10,%rbx
- vmovdqu -24+96-128(%rcx),%ymm13
- vpaddq %ymm0,%ymm1,%ymm0
- vpmuludq %ymm11,%ymm12,%ymm12
- vmovdqu %ymm0,(%rsp)
- vpaddq %ymm12,%ymm2,%ymm1
- vmovdqu -24+128-128(%rcx),%ymm0
- vpmuludq %ymm11,%ymm13,%ymm13
- vmovdqu -24+160-128(%rcx),%ymm12
- vpaddq %ymm13,%ymm3,%ymm2
- vpmuludq %ymm11,%ymm0,%ymm0
- vmovdqu -24+192-128(%rcx),%ymm13
- vpaddq %ymm0,%ymm4,%ymm3
- vpmuludq %ymm11,%ymm12,%ymm12
- vmovdqu -24+224-128(%rcx),%ymm0
- vpaddq %ymm12,%ymm5,%ymm4
- vpmuludq %ymm11,%ymm13,%ymm13
- vmovdqu -24+256-128(%rcx),%ymm12
- vpaddq %ymm13,%ymm6,%ymm5
- vpmuludq %ymm11,%ymm0,%ymm0
- vmovdqu -24+288-128(%rcx),%ymm13
- movq %r12,%r9
- vpaddq %ymm0,%ymm7,%ymm6
- vpmuludq %ymm11,%ymm12,%ymm12
- addq (%rsp),%r9
- vpaddq %ymm12,%ymm8,%ymm7
- vpmuludq %ymm11,%ymm13,%ymm13
- vmovq %r12,%xmm12
- vpaddq %ymm13,%ymm9,%ymm8
-
- decl %r14d
- jnz .Loop_mul_1024
- vpaddq (%rsp),%ymm12,%ymm0
-
- vpsrlq $29,%ymm0,%ymm12
- vpand %ymm15,%ymm0,%ymm0
- vpsrlq $29,%ymm1,%ymm13
- vpand %ymm15,%ymm1,%ymm1
- vpsrlq $29,%ymm2,%ymm10
- vpermq $0x93,%ymm12,%ymm12
- vpand %ymm15,%ymm2,%ymm2
- vpsrlq $29,%ymm3,%ymm11
- vpermq $0x93,%ymm13,%ymm13
- vpand %ymm15,%ymm3,%ymm3
-
- vpblendd $3,%ymm14,%ymm12,%ymm9
- vpermq $0x93,%ymm10,%ymm10
- vpblendd $3,%ymm12,%ymm13,%ymm12
- vpermq $0x93,%ymm11,%ymm11
- vpaddq %ymm9,%ymm0,%ymm0
- vpblendd $3,%ymm13,%ymm10,%ymm13
- vpaddq %ymm12,%ymm1,%ymm1
- vpblendd $3,%ymm10,%ymm11,%ymm10
- vpaddq %ymm13,%ymm2,%ymm2
- vpblendd $3,%ymm11,%ymm14,%ymm11
- vpaddq %ymm10,%ymm3,%ymm3
- vpaddq %ymm11,%ymm4,%ymm4
-
- vpsrlq $29,%ymm0,%ymm12
- vpand %ymm15,%ymm0,%ymm0
- vpsrlq $29,%ymm1,%ymm13
- vpand %ymm15,%ymm1,%ymm1
- vpsrlq $29,%ymm2,%ymm10
- vpermq $0x93,%ymm12,%ymm12
- vpand %ymm15,%ymm2,%ymm2
- vpsrlq $29,%ymm3,%ymm11
- vpermq $0x93,%ymm13,%ymm13
- vpand %ymm15,%ymm3,%ymm3
- vpermq $0x93,%ymm10,%ymm10
-
- vpblendd $3,%ymm14,%ymm12,%ymm9
- vpermq $0x93,%ymm11,%ymm11
- vpblendd $3,%ymm12,%ymm13,%ymm12
- vpaddq %ymm9,%ymm0,%ymm0
- vpblendd $3,%ymm13,%ymm10,%ymm13
- vpaddq %ymm12,%ymm1,%ymm1
- vpblendd $3,%ymm10,%ymm11,%ymm10
- vpaddq %ymm13,%ymm2,%ymm2
- vpblendd $3,%ymm11,%ymm14,%ymm11
- vpaddq %ymm10,%ymm3,%ymm3
- vpaddq %ymm11,%ymm4,%ymm4
-
- vmovdqu %ymm0,0-128(%rdi)
- vmovdqu %ymm1,32-128(%rdi)
- vmovdqu %ymm2,64-128(%rdi)
- vmovdqu %ymm3,96-128(%rdi)
- vpsrlq $29,%ymm4,%ymm12
- vpand %ymm15,%ymm4,%ymm4
- vpsrlq $29,%ymm5,%ymm13
- vpand %ymm15,%ymm5,%ymm5
- vpsrlq $29,%ymm6,%ymm10
- vpermq $0x93,%ymm12,%ymm12
- vpand %ymm15,%ymm6,%ymm6
- vpsrlq $29,%ymm7,%ymm11
- vpermq $0x93,%ymm13,%ymm13
- vpand %ymm15,%ymm7,%ymm7
- vpsrlq $29,%ymm8,%ymm0
- vpermq $0x93,%ymm10,%ymm10
- vpand %ymm15,%ymm8,%ymm8
- vpermq $0x93,%ymm11,%ymm11
-
- vpblendd $3,%ymm14,%ymm12,%ymm9
- vpermq $0x93,%ymm0,%ymm0
- vpblendd $3,%ymm12,%ymm13,%ymm12
- vpaddq %ymm9,%ymm4,%ymm4
- vpblendd $3,%ymm13,%ymm10,%ymm13
- vpaddq %ymm12,%ymm5,%ymm5
- vpblendd $3,%ymm10,%ymm11,%ymm10
- vpaddq %ymm13,%ymm6,%ymm6
- vpblendd $3,%ymm11,%ymm0,%ymm11
- vpaddq %ymm10,%ymm7,%ymm7
- vpaddq %ymm11,%ymm8,%ymm8
-
- vpsrlq $29,%ymm4,%ymm12
- vpand %ymm15,%ymm4,%ymm4
- vpsrlq $29,%ymm5,%ymm13
- vpand %ymm15,%ymm5,%ymm5
- vpsrlq $29,%ymm6,%ymm10
- vpermq $0x93,%ymm12,%ymm12
- vpand %ymm15,%ymm6,%ymm6
- vpsrlq $29,%ymm7,%ymm11
- vpermq $0x93,%ymm13,%ymm13
- vpand %ymm15,%ymm7,%ymm7
- vpsrlq $29,%ymm8,%ymm0
- vpermq $0x93,%ymm10,%ymm10
- vpand %ymm15,%ymm8,%ymm8
- vpermq $0x93,%ymm11,%ymm11
-
- vpblendd $3,%ymm14,%ymm12,%ymm9
- vpermq $0x93,%ymm0,%ymm0
- vpblendd $3,%ymm12,%ymm13,%ymm12
- vpaddq %ymm9,%ymm4,%ymm4
- vpblendd $3,%ymm13,%ymm10,%ymm13
- vpaddq %ymm12,%ymm5,%ymm5
- vpblendd $3,%ymm10,%ymm11,%ymm10
- vpaddq %ymm13,%ymm6,%ymm6
- vpblendd $3,%ymm11,%ymm0,%ymm11
- vpaddq %ymm10,%ymm7,%ymm7
- vpaddq %ymm11,%ymm8,%ymm8
-
- vmovdqu %ymm4,128-128(%rdi)
- vmovdqu %ymm5,160-128(%rdi)
- vmovdqu %ymm6,192-128(%rdi)
- vmovdqu %ymm7,224-128(%rdi)
- vmovdqu %ymm8,256-128(%rdi)
- vzeroupper
-
- movq %rbp,%rax
-.cfi_def_cfa_register %rax
- movq -48(%rax),%r15
-.cfi_restore %r15
- movq -40(%rax),%r14
-.cfi_restore %r14
- movq -32(%rax),%r13
-.cfi_restore %r13
- movq -24(%rax),%r12
-.cfi_restore %r12
- movq -16(%rax),%rbp
-.cfi_restore %rbp
- movq -8(%rax),%rbx
-.cfi_restore %rbx
- leaq (%rax),%rsp
-.cfi_def_cfa_register %rsp
-.Lmul_1024_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size rsaz_1024_mul_avx2,.-rsaz_1024_mul_avx2
-.globl rsaz_1024_red2norm_avx2
-.type rsaz_1024_red2norm_avx2,@function
-.align 32
-rsaz_1024_red2norm_avx2:
-.cfi_startproc
- subq $-128,%rsi
- xorq %rax,%rax
- movq -128(%rsi),%r8
- movq -120(%rsi),%r9
- movq -112(%rsi),%r10
- shlq $0,%r8
- shlq $29,%r9
- movq %r10,%r11
- shlq $58,%r10
- shrq $6,%r11
- addq %r8,%rax
- addq %r9,%rax
- addq %r10,%rax
- adcq $0,%r11
- movq %rax,0(%rdi)
- movq %r11,%rax
- movq -104(%rsi),%r8
- movq -96(%rsi),%r9
- shlq $23,%r8
- movq %r9,%r10
- shlq $52,%r9
- shrq $12,%r10
- addq %r8,%rax
- addq %r9,%rax
- adcq $0,%r10
- movq %rax,8(%rdi)
- movq %r10,%rax
- movq -88(%rsi),%r11
- movq -80(%rsi),%r8
- shlq $17,%r11
- movq %r8,%r9
- shlq $46,%r8
- shrq $18,%r9
- addq %r11,%rax
- addq %r8,%rax
- adcq $0,%r9
- movq %rax,16(%rdi)
- movq %r9,%rax
- movq -72(%rsi),%r10
- movq -64(%rsi),%r11
- shlq $11,%r10
- movq %r11,%r8
- shlq $40,%r11
- shrq $24,%r8
- addq %r10,%rax
- addq %r11,%rax
- adcq $0,%r8
- movq %rax,24(%rdi)
- movq %r8,%rax
- movq -56(%rsi),%r9
- movq -48(%rsi),%r10
- movq -40(%rsi),%r11
- shlq $5,%r9
- shlq $34,%r10
- movq %r11,%r8
- shlq $63,%r11
- shrq $1,%r8
- addq %r9,%rax
- addq %r10,%rax
- addq %r11,%rax
- adcq $0,%r8
- movq %rax,32(%rdi)
- movq %r8,%rax
- movq -32(%rsi),%r9
- movq -24(%rsi),%r10
- shlq $28,%r9
- movq %r10,%r11
- shlq $57,%r10
- shrq $7,%r11
- addq %r9,%rax
- addq %r10,%rax
- adcq $0,%r11
- movq %rax,40(%rdi)
- movq %r11,%rax
- movq -16(%rsi),%r8
- movq -8(%rsi),%r9
- shlq $22,%r8
- movq %r9,%r10
- shlq $51,%r9
- shrq $13,%r10
- addq %r8,%rax
- addq %r9,%rax
- adcq $0,%r10
- movq %rax,48(%rdi)
- movq %r10,%rax
- movq 0(%rsi),%r11
- movq 8(%rsi),%r8
- shlq $16,%r11
- movq %r8,%r9
- shlq $45,%r8
- shrq $19,%r9
- addq %r11,%rax
- addq %r8,%rax
- adcq $0,%r9
- movq %rax,56(%rdi)
- movq %r9,%rax
- movq 16(%rsi),%r10
- movq 24(%rsi),%r11
- shlq $10,%r10
- movq %r11,%r8
- shlq $39,%r11
- shrq $25,%r8
- addq %r10,%rax
- addq %r11,%rax
- adcq $0,%r8
- movq %rax,64(%rdi)
- movq %r8,%rax
- movq 32(%rsi),%r9
- movq 40(%rsi),%r10
- movq 48(%rsi),%r11
- shlq $4,%r9
- shlq $33,%r10
- movq %r11,%r8
- shlq $62,%r11
- shrq $2,%r8
- addq %r9,%rax
- addq %r10,%rax
- addq %r11,%rax
- adcq $0,%r8
- movq %rax,72(%rdi)
- movq %r8,%rax
- movq 56(%rsi),%r9
- movq 64(%rsi),%r10
- shlq $27,%r9
- movq %r10,%r11
- shlq $56,%r10
- shrq $8,%r11
- addq %r9,%rax
- addq %r10,%rax
- adcq $0,%r11
- movq %rax,80(%rdi)
- movq %r11,%rax
- movq 72(%rsi),%r8
- movq 80(%rsi),%r9
- shlq $21,%r8
- movq %r9,%r10
- shlq $50,%r9
- shrq $14,%r10
- addq %r8,%rax
- addq %r9,%rax
- adcq $0,%r10
- movq %rax,88(%rdi)
- movq %r10,%rax
- movq 88(%rsi),%r11
- movq 96(%rsi),%r8
- shlq $15,%r11
- movq %r8,%r9
- shlq $44,%r8
- shrq $20,%r9
- addq %r11,%rax
- addq %r8,%rax
- adcq $0,%r9
- movq %rax,96(%rdi)
- movq %r9,%rax
- movq 104(%rsi),%r10
- movq 112(%rsi),%r11
- shlq $9,%r10
- movq %r11,%r8
- shlq $38,%r11
- shrq $26,%r8
- addq %r10,%rax
- addq %r11,%rax
- adcq $0,%r8
- movq %rax,104(%rdi)
- movq %r8,%rax
- movq 120(%rsi),%r9
- movq 128(%rsi),%r10
- movq 136(%rsi),%r11
- shlq $3,%r9
- shlq $32,%r10
- movq %r11,%r8
- shlq $61,%r11
- shrq $3,%r8
- addq %r9,%rax
- addq %r10,%rax
- addq %r11,%rax
- adcq $0,%r8
- movq %rax,112(%rdi)
- movq %r8,%rax
- movq 144(%rsi),%r9
- movq 152(%rsi),%r10
- shlq $26,%r9
- movq %r10,%r11
- shlq $55,%r10
- shrq $9,%r11
- addq %r9,%rax
- addq %r10,%rax
- adcq $0,%r11
- movq %rax,120(%rdi)
- movq %r11,%rax
- .byte 0xf3,0xc3
-.cfi_endproc
-.size rsaz_1024_red2norm_avx2,.-rsaz_1024_red2norm_avx2
-
-.globl rsaz_1024_norm2red_avx2
-.type rsaz_1024_norm2red_avx2,@function
-.align 32
rsaz_1024_norm2red_avx2:
-.cfi_startproc
- subq $-128,%rdi
- movq (%rsi),%r8
- movl $0x1fffffff,%eax
- movq 8(%rsi),%r9
- movq %r8,%r11
- shrq $0,%r11
- andq %rax,%r11
- movq %r11,-128(%rdi)
- movq %r8,%r10
- shrq $29,%r10
- andq %rax,%r10
- movq %r10,-120(%rdi)
- shrdq $58,%r9,%r8
- andq %rax,%r8
- movq %r8,-112(%rdi)
- movq 16(%rsi),%r10
- movq %r9,%r8
- shrq $23,%r8
- andq %rax,%r8
- movq %r8,-104(%rdi)
- shrdq $52,%r10,%r9
- andq %rax,%r9
- movq %r9,-96(%rdi)
- movq 24(%rsi),%r11
- movq %r10,%r9
- shrq $17,%r9
- andq %rax,%r9
- movq %r9,-88(%rdi)
- shrdq $46,%r11,%r10
- andq %rax,%r10
- movq %r10,-80(%rdi)
- movq 32(%rsi),%r8
- movq %r11,%r10
- shrq $11,%r10
- andq %rax,%r10
- movq %r10,-72(%rdi)
- shrdq $40,%r8,%r11
- andq %rax,%r11
- movq %r11,-64(%rdi)
- movq 40(%rsi),%r9
- movq %r8,%r11
- shrq $5,%r11
- andq %rax,%r11
- movq %r11,-56(%rdi)
- movq %r8,%r10
- shrq $34,%r10
- andq %rax,%r10
- movq %r10,-48(%rdi)
- shrdq $63,%r9,%r8
- andq %rax,%r8
- movq %r8,-40(%rdi)
- movq 48(%rsi),%r10
- movq %r9,%r8
- shrq $28,%r8
- andq %rax,%r8
- movq %r8,-32(%rdi)
- shrdq $57,%r10,%r9
- andq %rax,%r9
- movq %r9,-24(%rdi)
- movq 56(%rsi),%r11
- movq %r10,%r9
- shrq $22,%r9
- andq %rax,%r9
- movq %r9,-16(%rdi)
- shrdq $51,%r11,%r10
- andq %rax,%r10
- movq %r10,-8(%rdi)
- movq 64(%rsi),%r8
- movq %r11,%r10
- shrq $16,%r10
- andq %rax,%r10
- movq %r10,0(%rdi)
- shrdq $45,%r8,%r11
- andq %rax,%r11
- movq %r11,8(%rdi)
- movq 72(%rsi),%r9
- movq %r8,%r11
- shrq $10,%r11
- andq %rax,%r11
- movq %r11,16(%rdi)
- shrdq $39,%r9,%r8
- andq %rax,%r8
- movq %r8,24(%rdi)
- movq 80(%rsi),%r10
- movq %r9,%r8
- shrq $4,%r8
- andq %rax,%r8
- movq %r8,32(%rdi)
- movq %r9,%r11
- shrq $33,%r11
- andq %rax,%r11
- movq %r11,40(%rdi)
- shrdq $62,%r10,%r9
- andq %rax,%r9
- movq %r9,48(%rdi)
- movq 88(%rsi),%r11
- movq %r10,%r9
- shrq $27,%r9
- andq %rax,%r9
- movq %r9,56(%rdi)
- shrdq $56,%r11,%r10
- andq %rax,%r10
- movq %r10,64(%rdi)
- movq 96(%rsi),%r8
- movq %r11,%r10
- shrq $21,%r10
- andq %rax,%r10
- movq %r10,72(%rdi)
- shrdq $50,%r8,%r11
- andq %rax,%r11
- movq %r11,80(%rdi)
- movq 104(%rsi),%r9
- movq %r8,%r11
- shrq $15,%r11
- andq %rax,%r11
- movq %r11,88(%rdi)
- shrdq $44,%r9,%r8
- andq %rax,%r8
- movq %r8,96(%rdi)
- movq 112(%rsi),%r10
- movq %r9,%r8
- shrq $9,%r8
- andq %rax,%r8
- movq %r8,104(%rdi)
- shrdq $38,%r10,%r9
- andq %rax,%r9
- movq %r9,112(%rdi)
- movq 120(%rsi),%r11
- movq %r10,%r9
- shrq $3,%r9
- andq %rax,%r9
- movq %r9,120(%rdi)
- movq %r10,%r8
- shrq $32,%r8
- andq %rax,%r8
- movq %r8,128(%rdi)
- shrdq $61,%r11,%r10
- andq %rax,%r10
- movq %r10,136(%rdi)
- xorq %r8,%r8
- movq %r11,%r10
- shrq $26,%r10
- andq %rax,%r10
- movq %r10,144(%rdi)
- shrdq $55,%r8,%r11
- andq %rax,%r11
- movq %r11,152(%rdi)
- movq %r8,160(%rdi)
- movq %r8,168(%rdi)
- movq %r8,176(%rdi)
- movq %r8,184(%rdi)
- .byte 0xf3,0xc3
-.cfi_endproc
-.size rsaz_1024_norm2red_avx2,.-rsaz_1024_norm2red_avx2
-.globl rsaz_1024_scatter5_avx2
-.type rsaz_1024_scatter5_avx2,@function
-.align 32
+rsaz_1024_red2norm_avx2:
rsaz_1024_scatter5_avx2:
-.cfi_startproc
- vzeroupper
- vmovdqu .Lscatter_permd(%rip),%ymm5
- shll $4,%edx
- leaq (%rdi,%rdx,1),%rdi
- movl $9,%eax
- jmp .Loop_scatter_1024
-
-.align 32
-.Loop_scatter_1024:
- vmovdqu (%rsi),%ymm0
- leaq 32(%rsi),%rsi
- vpermd %ymm0,%ymm5,%ymm0
- vmovdqu %xmm0,(%rdi)
- leaq 512(%rdi),%rdi
- decl %eax
- jnz .Loop_scatter_1024
-
- vzeroupper
- .byte 0xf3,0xc3
-.cfi_endproc
-.size rsaz_1024_scatter5_avx2,.-rsaz_1024_scatter5_avx2
-
-.globl rsaz_1024_gather5_avx2
-.type rsaz_1024_gather5_avx2,@function
-.align 32
rsaz_1024_gather5_avx2:
-.cfi_startproc
- vzeroupper
- movq %rsp,%r11
-.cfi_def_cfa_register %r11
- leaq -256(%rsp),%rsp
- andq $-32,%rsp
- leaq .Linc(%rip),%r10
- leaq -128(%rsp),%rax
-
- vmovd %edx,%xmm4
- vmovdqa (%r10),%ymm0
- vmovdqa 32(%r10),%ymm1
- vmovdqa 64(%r10),%ymm5
- vpbroadcastd %xmm4,%ymm4
-
- vpaddd %ymm5,%ymm0,%ymm2
- vpcmpeqd %ymm4,%ymm0,%ymm0
- vpaddd %ymm5,%ymm1,%ymm3
- vpcmpeqd %ymm4,%ymm1,%ymm1
- vmovdqa %ymm0,0+128(%rax)
- vpaddd %ymm5,%ymm2,%ymm0
- vpcmpeqd %ymm4,%ymm2,%ymm2
- vmovdqa %ymm1,32+128(%rax)
- vpaddd %ymm5,%ymm3,%ymm1
- vpcmpeqd %ymm4,%ymm3,%ymm3
- vmovdqa %ymm2,64+128(%rax)
- vpaddd %ymm5,%ymm0,%ymm2
- vpcmpeqd %ymm4,%ymm0,%ymm0
- vmovdqa %ymm3,96+128(%rax)
- vpaddd %ymm5,%ymm1,%ymm3
- vpcmpeqd %ymm4,%ymm1,%ymm1
- vmovdqa %ymm0,128+128(%rax)
- vpaddd %ymm5,%ymm2,%ymm8
- vpcmpeqd %ymm4,%ymm2,%ymm2
- vmovdqa %ymm1,160+128(%rax)
- vpaddd %ymm5,%ymm3,%ymm9
- vpcmpeqd %ymm4,%ymm3,%ymm3
- vmovdqa %ymm2,192+128(%rax)
- vpaddd %ymm5,%ymm8,%ymm10
- vpcmpeqd %ymm4,%ymm8,%ymm8
- vmovdqa %ymm3,224+128(%rax)
- vpaddd %ymm5,%ymm9,%ymm11
- vpcmpeqd %ymm4,%ymm9,%ymm9
- vpaddd %ymm5,%ymm10,%ymm12
- vpcmpeqd %ymm4,%ymm10,%ymm10
- vpaddd %ymm5,%ymm11,%ymm13
- vpcmpeqd %ymm4,%ymm11,%ymm11
- vpaddd %ymm5,%ymm12,%ymm14
- vpcmpeqd %ymm4,%ymm12,%ymm12
- vpaddd %ymm5,%ymm13,%ymm15
- vpcmpeqd %ymm4,%ymm13,%ymm13
- vpcmpeqd %ymm4,%ymm14,%ymm14
- vpcmpeqd %ymm4,%ymm15,%ymm15
-
- vmovdqa -32(%r10),%ymm7
- leaq 128(%rsi),%rsi
- movl $9,%edx
-
-.Loop_gather_1024:
- vmovdqa 0-128(%rsi),%ymm0
- vmovdqa 32-128(%rsi),%ymm1
- vmovdqa 64-128(%rsi),%ymm2
- vmovdqa 96-128(%rsi),%ymm3
- vpand 0+128(%rax),%ymm0,%ymm0
- vpand 32+128(%rax),%ymm1,%ymm1
- vpand 64+128(%rax),%ymm2,%ymm2
- vpor %ymm0,%ymm1,%ymm4
- vpand 96+128(%rax),%ymm3,%ymm3
- vmovdqa 128-128(%rsi),%ymm0
- vmovdqa 160-128(%rsi),%ymm1
- vpor %ymm2,%ymm3,%ymm5
- vmovdqa 192-128(%rsi),%ymm2
- vmovdqa 224-128(%rsi),%ymm3
- vpand 128+128(%rax),%ymm0,%ymm0
- vpand 160+128(%rax),%ymm1,%ymm1
- vpand 192+128(%rax),%ymm2,%ymm2
- vpor %ymm0,%ymm4,%ymm4
- vpand 224+128(%rax),%ymm3,%ymm3
- vpand 256-128(%rsi),%ymm8,%ymm0
- vpor %ymm1,%ymm5,%ymm5
- vpand 288-128(%rsi),%ymm9,%ymm1
- vpor %ymm2,%ymm4,%ymm4
- vpand 320-128(%rsi),%ymm10,%ymm2
- vpor %ymm3,%ymm5,%ymm5
- vpand 352-128(%rsi),%ymm11,%ymm3
- vpor %ymm0,%ymm4,%ymm4
- vpand 384-128(%rsi),%ymm12,%ymm0
- vpor %ymm1,%ymm5,%ymm5
- vpand 416-128(%rsi),%ymm13,%ymm1
- vpor %ymm2,%ymm4,%ymm4
- vpand 448-128(%rsi),%ymm14,%ymm2
- vpor %ymm3,%ymm5,%ymm5
- vpand 480-128(%rsi),%ymm15,%ymm3
- leaq 512(%rsi),%rsi
- vpor %ymm0,%ymm4,%ymm4
- vpor %ymm1,%ymm5,%ymm5
- vpor %ymm2,%ymm4,%ymm4
- vpor %ymm3,%ymm5,%ymm5
-
- vpor %ymm5,%ymm4,%ymm4
- vextracti128 $1,%ymm4,%xmm5
- vpor %xmm4,%xmm5,%xmm5
- vpermd %ymm5,%ymm7,%ymm5
- vmovdqu %ymm5,(%rdi)
- leaq 32(%rdi),%rdi
- decl %edx
- jnz .Loop_gather_1024
-
- vpxor %ymm0,%ymm0,%ymm0
- vmovdqu %ymm0,(%rdi)
- vzeroupper
- leaq (%r11),%rsp
-.cfi_def_cfa_register %rsp
+.byte 0x0f,0x0b
.byte 0xf3,0xc3
-.cfi_endproc
-.LSEH_end_rsaz_1024_gather5:
-.size rsaz_1024_gather5_avx2,.-rsaz_1024_gather5_avx2
-
-.globl rsaz_avx2_eligible
-.type rsaz_avx2_eligible,@function
-.align 32
-rsaz_avx2_eligible:
- movl OPENSSL_ia32cap_P+8(%rip),%eax
- movl $524544,%ecx
- movl $0,%edx
- andl %eax,%ecx
- cmpl $524544,%ecx
- cmovel %edx,%eax
- andl $32,%eax
- shrl $5,%eax
- .byte 0xf3,0xc3
-.size rsaz_avx2_eligible,.-rsaz_avx2_eligible
-
-.align 64
-.Land_mask:
-.quad 0x1fffffff,0x1fffffff,0x1fffffff,0x1fffffff
-.Lscatter_permd:
-.long 0,2,4,6,7,7,7,7
-.Lgather_permd:
-.long 0,7,1,7,2,7,3,7
-.Linc:
-.long 0,0,0,0, 1,1,1,1
-.long 2,2,2,2, 3,3,3,3
-.long 4,4,4,4, 4,4,4,4
-.align 64
+.size rsaz_1024_sqr_avx2,.-rsaz_1024_sqr_avx2
diff --git a/secure/lib/libcrypto/amd64/rsaz-x86_64.S b/secure/lib/libcrypto/amd64/rsaz-x86_64.S
index e4e7b0469a538..ae64f7a739873 100644
--- a/secure/lib/libcrypto/amd64/rsaz-x86_64.S
+++ b/secure/lib/libcrypto/amd64/rsaz-x86_64.S
@@ -31,14 +31,10 @@ rsaz_512_sqr:
subq $128+24,%rsp
.cfi_adjust_cfa_offset 128+24
.Lsqr_body:
- movq %rdx,%rbp
+.byte 102,72,15,110,202
movq (%rsi),%rdx
movq 8(%rsi),%rax
movq %rcx,128(%rsp)
- movl $0x80100,%r11d
- andl OPENSSL_ia32cap_P+8(%rip),%r11d
- cmpl $0x80100,%r11d
- je .Loop_sqrx
jmp .Loop_sqr
.align 32
@@ -46,6 +42,7 @@ rsaz_512_sqr:
movl %r8d,128+8(%rsp)
movq %rdx,%rbx
+ movq %rax,%rbp
mulq %rdx
movq %rax,%r8
movq 16(%rsi),%rax
@@ -84,31 +81,29 @@ rsaz_512_sqr:
mulq %rbx
addq %rax,%r14
movq %rbx,%rax
- movq %rdx,%r15
- adcq $0,%r15
+ adcq $0,%rdx
+ xorq %rcx,%rcx
addq %r8,%r8
- movq %r9,%rcx
- adcq %r9,%r9
+ movq %rdx,%r15
+ adcq $0,%rcx
mulq %rax
- movq %rax,(%rsp)
- addq %rdx,%r8
- adcq $0,%r9
+ addq %r8,%rdx
+ adcq $0,%rcx
- movq %r8,8(%rsp)
- shrq $63,%rcx
+ movq %rax,(%rsp)
+ movq %rdx,8(%rsp)
- movq 8(%rsi),%r8
movq 16(%rsi),%rax
- mulq %r8
+ mulq %rbp
addq %rax,%r10
movq 24(%rsi),%rax
movq %rdx,%rbx
adcq $0,%rbx
- mulq %r8
+ mulq %rbp
addq %rax,%r11
movq 32(%rsi),%rax
adcq $0,%rdx
@@ -116,7 +111,7 @@ rsaz_512_sqr:
movq %rdx,%rbx
adcq $0,%rbx
- mulq %r8
+ mulq %rbp
addq %rax,%r12
movq 40(%rsi),%rax
adcq $0,%rdx
@@ -124,7 +119,7 @@ rsaz_512_sqr:
movq %rdx,%rbx
adcq $0,%rbx
- mulq %r8
+ mulq %rbp
addq %rax,%r13
movq 48(%rsi),%rax
adcq $0,%rdx
@@ -132,7 +127,7 @@ rsaz_512_sqr:
movq %rdx,%rbx
adcq $0,%rbx
- mulq %r8
+ mulq %rbp
addq %rax,%r14
movq 56(%rsi),%rax
adcq $0,%rdx
@@ -140,39 +135,39 @@ rsaz_512_sqr:
movq %rdx,%rbx
adcq $0,%rbx
- mulq %r8
+ mulq %rbp
addq %rax,%r15
- movq %r8,%rax
+ movq %rbp,%rax
adcq $0,%rdx
addq %rbx,%r15
- movq %rdx,%r8
- movq %r10,%rdx
- adcq $0,%r8
+ adcq $0,%rdx
- addq %rdx,%rdx
- leaq (%rcx,%r10,2),%r10
- movq %r11,%rbx
- adcq %r11,%r11
+ xorq %rbx,%rbx
+ addq %r9,%r9
+ movq %rdx,%r8
+ adcq %r10,%r10
+ adcq $0,%rbx
mulq %rax
+
+ addq %rcx,%rax
+ movq 16(%rsi),%rbp
addq %rax,%r9
+ movq 24(%rsi),%rax
adcq %rdx,%r10
- adcq $0,%r11
+ adcq $0,%rbx
movq %r9,16(%rsp)
movq %r10,24(%rsp)
- shrq $63,%rbx
- movq 16(%rsi),%r9
- movq 24(%rsi),%rax
- mulq %r9
+ mulq %rbp
addq %rax,%r12
movq 32(%rsi),%rax
movq %rdx,%rcx
adcq $0,%rcx
- mulq %r9
+ mulq %rbp
addq %rax,%r13
movq 40(%rsi),%rax
adcq $0,%rdx
@@ -180,7 +175,7 @@ rsaz_512_sqr:
movq %rdx,%rcx
adcq $0,%rcx
- mulq %r9
+ mulq %rbp
addq %rax,%r14
movq 48(%rsi),%rax
adcq $0,%rdx
@@ -188,9 +183,7 @@ rsaz_512_sqr:
movq %rdx,%rcx
adcq $0,%rcx
- mulq %r9
- movq %r12,%r10
- leaq (%rbx,%r12,2),%r12
+ mulq %rbp
addq %rax,%r15
movq 56(%rsi),%rax
adcq $0,%rdx
@@ -198,36 +191,40 @@ rsaz_512_sqr:
movq %rdx,%rcx
adcq $0,%rcx
- mulq %r9
- shrq $63,%r10
+ mulq %rbp
addq %rax,%r8
- movq %r9,%rax
+ movq %rbp,%rax
adcq $0,%rdx
addq %rcx,%r8
- movq %rdx,%r9
- adcq $0,%r9
+ adcq $0,%rdx
- movq %r13,%rcx
- leaq (%r10,%r13,2),%r13
+ xorq %rcx,%rcx
+ addq %r11,%r11
+ movq %rdx,%r9
+ adcq %r12,%r12
+ adcq $0,%rcx
mulq %rax
+
+ addq %rbx,%rax
+ movq 24(%rsi),%r10
addq %rax,%r11
+ movq 32(%rsi),%rax
adcq %rdx,%r12
- adcq $0,%r13
+ adcq $0,%rcx
movq %r11,32(%rsp)
movq %r12,40(%rsp)
- shrq $63,%rcx
- movq 24(%rsi),%r10
- movq 32(%rsi),%rax
+ movq %rax,%r11
mulq %r10
addq %rax,%r14
movq 40(%rsi),%rax
movq %rdx,%rbx
adcq $0,%rbx
+ movq %rax,%r12
mulq %r10
addq %rax,%r15
movq 48(%rsi),%rax
@@ -236,9 +233,8 @@ rsaz_512_sqr:
movq %rdx,%rbx
adcq $0,%rbx
+ movq %rax,%rbp
mulq %r10
- movq %r14,%r12
- leaq (%rcx,%r14,2),%r14
addq %rax,%r8
movq 56(%rsi),%rax
adcq $0,%rdx
@@ -247,32 +243,33 @@ rsaz_512_sqr:
adcq $0,%rbx
mulq %r10
- shrq $63,%r12
addq %rax,%r9
movq %r10,%rax
adcq $0,%rdx
addq %rbx,%r9
- movq %rdx,%r10
- adcq $0,%r10
+ adcq $0,%rdx
- movq %r15,%rbx
- leaq (%r12,%r15,2),%r15
+ xorq %rbx,%rbx
+ addq %r13,%r13
+ movq %rdx,%r10
+ adcq %r14,%r14
+ adcq $0,%rbx
mulq %rax
+
+ addq %rcx,%rax
addq %rax,%r13
+ movq %r12,%rax
adcq %rdx,%r14
- adcq $0,%r15
+ adcq $0,%rbx
movq %r13,48(%rsp)
movq %r14,56(%rsp)
- shrq $63,%rbx
- movq 32(%rsi),%r11
- movq 40(%rsi),%rax
mulq %r11
addq %rax,%r8
- movq 48(%rsi),%rax
+ movq %rbp,%rax
movq %rdx,%rcx
adcq $0,%rcx
@@ -280,97 +277,99 @@ rsaz_512_sqr:
addq %rax,%r9
movq 56(%rsi),%rax
adcq $0,%rdx
- movq %r8,%r12
- leaq (%rbx,%r8,2),%r8
addq %rcx,%r9
movq %rdx,%rcx
adcq $0,%rcx
+ movq %rax,%r14
mulq %r11
- shrq $63,%r12
addq %rax,%r10
movq %r11,%rax
adcq $0,%rdx
addq %rcx,%r10
- movq %rdx,%r11
- adcq $0,%r11
+ adcq $0,%rdx
- movq %r9,%rcx
- leaq (%r12,%r9,2),%r9
+ xorq %rcx,%rcx
+ addq %r15,%r15
+ movq %rdx,%r11
+ adcq %r8,%r8
+ adcq $0,%rcx
mulq %rax
+
+ addq %rbx,%rax
addq %rax,%r15
+ movq %rbp,%rax
adcq %rdx,%r8
- adcq $0,%r9
+ adcq $0,%rcx
movq %r15,64(%rsp)
movq %r8,72(%rsp)
- shrq $63,%rcx
- movq 40(%rsi),%r12
- movq 48(%rsi),%rax
mulq %r12
addq %rax,%r10
- movq 56(%rsi),%rax
+ movq %r14,%rax
movq %rdx,%rbx
adcq $0,%rbx
mulq %r12
addq %rax,%r11
movq %r12,%rax
- movq %r10,%r15
- leaq (%rcx,%r10,2),%r10
adcq $0,%rdx
- shrq $63,%r15
addq %rbx,%r11
- movq %rdx,%r12
- adcq $0,%r12
+ adcq $0,%rdx
- movq %r11,%rbx
- leaq (%r15,%r11,2),%r11
+ xorq %rbx,%rbx
+ addq %r9,%r9
+ movq %rdx,%r12
+ adcq %r10,%r10
+ adcq $0,%rbx
mulq %rax
+
+ addq %rcx,%rax
addq %rax,%r9
+ movq %r14,%rax
adcq %rdx,%r10
- adcq $0,%r11
+ adcq $0,%rbx
movq %r9,80(%rsp)
movq %r10,88(%rsp)
- movq 48(%rsi),%r13
- movq 56(%rsi),%rax
- mulq %r13
+ mulq %rbp
addq %rax,%r12
- movq %r13,%rax
- movq %rdx,%r13
- adcq $0,%r13
+ movq %rbp,%rax
+ adcq $0,%rdx
- xorq %r14,%r14
- shlq $1,%rbx
+ xorq %rcx,%rcx
+ addq %r11,%r11
+ movq %rdx,%r13
adcq %r12,%r12
- adcq %r13,%r13
- adcq %r14,%r14
+ adcq $0,%rcx
mulq %rax
+
+ addq %rbx,%rax
addq %rax,%r11
+ movq %r14,%rax
adcq %rdx,%r12
- adcq $0,%r13
+ adcq $0,%rcx
movq %r11,96(%rsp)
movq %r12,104(%rsp)
- movq 56(%rsi),%rax
- mulq %rax
- addq %rax,%r13
- adcq $0,%rdx
+ xorq %rbx,%rbx
+ addq %r13,%r13
+ adcq $0,%rbx
- addq %rdx,%r14
+ mulq %rax
- movq %r13,112(%rsp)
- movq %r14,120(%rsp)
+ addq %rcx,%rax
+ addq %r13,%rax
+ adcq %rbx,%rdx
movq (%rsp),%r8
movq 8(%rsp),%r9
@@ -380,276 +379,12 @@ rsaz_512_sqr:
movq 40(%rsp),%r13
movq 48(%rsp),%r14
movq 56(%rsp),%r15
-
- call __rsaz_512_reduce
-
- addq 64(%rsp),%r8
- adcq 72(%rsp),%r9
- adcq 80(%rsp),%r10
- adcq 88(%rsp),%r11
- adcq 96(%rsp),%r12
- adcq 104(%rsp),%r13
- adcq 112(%rsp),%r14
- adcq 120(%rsp),%r15
- sbbq %rcx,%rcx
-
- call __rsaz_512_subtract
-
- movq %r8,%rdx
- movq %r9,%rax
- movl 128+8(%rsp),%r8d
- movq %rdi,%rsi
-
- decl %r8d
- jnz .Loop_sqr
- jmp .Lsqr_tail
-
-.align 32
-.Loop_sqrx:
- movl %r8d,128+8(%rsp)
-.byte 102,72,15,110,199
-.byte 102,72,15,110,205
-
- mulxq %rax,%r8,%r9
-
- mulxq 16(%rsi),%rcx,%r10
- xorq %rbp,%rbp
-
- mulxq 24(%rsi),%rax,%r11
- adcxq %rcx,%r9
-
- mulxq 32(%rsi),%rcx,%r12
- adcxq %rax,%r10
-
- mulxq 40(%rsi),%rax,%r13
- adcxq %rcx,%r11
-
-.byte 0xc4,0x62,0xf3,0xf6,0xb6,0x30,0x00,0x00,0x00
- adcxq %rax,%r12
- adcxq %rcx,%r13
-
-.byte 0xc4,0x62,0xfb,0xf6,0xbe,0x38,0x00,0x00,0x00
- adcxq %rax,%r14
- adcxq %rbp,%r15
-
- movq %r9,%rcx
- shldq $1,%r8,%r9
- shlq $1,%r8
-
- xorl %ebp,%ebp
- mulxq %rdx,%rax,%rdx
- adcxq %rdx,%r8
- movq 8(%rsi),%rdx
- adcxq %rbp,%r9
-
- movq %rax,(%rsp)
- movq %r8,8(%rsp)
-
-
- mulxq 16(%rsi),%rax,%rbx
- adoxq %rax,%r10
- adcxq %rbx,%r11
-
-.byte 0xc4,0x62,0xc3,0xf6,0x86,0x18,0x00,0x00,0x00
- adoxq %rdi,%r11
- adcxq %r8,%r12
-
- mulxq 32(%rsi),%rax,%rbx
- adoxq %rax,%r12
- adcxq %rbx,%r13
-
- mulxq 40(%rsi),%rdi,%r8
- adoxq %rdi,%r13
- adcxq %r8,%r14
-
-.byte 0xc4,0xe2,0xfb,0xf6,0x9e,0x30,0x00,0x00,0x00
- adoxq %rax,%r14
- adcxq %rbx,%r15
-
-.byte 0xc4,0x62,0xc3,0xf6,0x86,0x38,0x00,0x00,0x00
- adoxq %rdi,%r15
- adcxq %rbp,%r8
- adoxq %rbp,%r8
-
- movq %r11,%rbx
- shldq $1,%r10,%r11
- shldq $1,%rcx,%r10
-
- xorl %ebp,%ebp
- mulxq %rdx,%rax,%rcx
- movq 16(%rsi),%rdx
- adcxq %rax,%r9
- adcxq %rcx,%r10
- adcxq %rbp,%r11
-
- movq %r9,16(%rsp)
-.byte 0x4c,0x89,0x94,0x24,0x18,0x00,0x00,0x00
-
-
-.byte 0xc4,0x62,0xc3,0xf6,0x8e,0x18,0x00,0x00,0x00
- adoxq %rdi,%r12
- adcxq %r9,%r13
-
- mulxq 32(%rsi),%rax,%rcx
- adoxq %rax,%r13
- adcxq %rcx,%r14
-
- mulxq 40(%rsi),%rdi,%r9
- adoxq %rdi,%r14
- adcxq %r9,%r15
-
-.byte 0xc4,0xe2,0xfb,0xf6,0x8e,0x30,0x00,0x00,0x00
- adoxq %rax,%r15
- adcxq %rcx,%r8
-
-.byte 0xc4,0x62,0xc3,0xf6,0x8e,0x38,0x00,0x00,0x00
- adoxq %rdi,%r8
- adcxq %rbp,%r9
- adoxq %rbp,%r9
-
- movq %r13,%rcx
- shldq $1,%r12,%r13
- shldq $1,%rbx,%r12
-
- xorl %ebp,%ebp
- mulxq %rdx,%rax,%rdx
- adcxq %rax,%r11
- adcxq %rdx,%r12
- movq 24(%rsi),%rdx
- adcxq %rbp,%r13
-
- movq %r11,32(%rsp)
-.byte 0x4c,0x89,0xa4,0x24,0x28,0x00,0x00,0x00
-
-
-.byte 0xc4,0xe2,0xfb,0xf6,0x9e,0x20,0x00,0x00,0x00
- adoxq %rax,%r14
- adcxq %rbx,%r15
-
- mulxq 40(%rsi),%rdi,%r10
- adoxq %rdi,%r15
- adcxq %r10,%r8
-
- mulxq 48(%rsi),%rax,%rbx
- adoxq %rax,%r8
- adcxq %rbx,%r9
-
- mulxq 56(%rsi),%rdi,%r10
- adoxq %rdi,%r9
- adcxq %rbp,%r10
- adoxq %rbp,%r10
-
-.byte 0x66
- movq %r15,%rbx
- shldq $1,%r14,%r15
- shldq $1,%rcx,%r14
-
- xorl %ebp,%ebp
- mulxq %rdx,%rax,%rdx
- adcxq %rax,%r13
- adcxq %rdx,%r14
- movq 32(%rsi),%rdx
- adcxq %rbp,%r15
-
- movq %r13,48(%rsp)
- movq %r14,56(%rsp)
-
-
-.byte 0xc4,0x62,0xc3,0xf6,0x9e,0x28,0x00,0x00,0x00
- adoxq %rdi,%r8
- adcxq %r11,%r9
-
- mulxq 48(%rsi),%rax,%rcx
- adoxq %rax,%r9
- adcxq %rcx,%r10
-
- mulxq 56(%rsi),%rdi,%r11
- adoxq %rdi,%r10
- adcxq %rbp,%r11
- adoxq %rbp,%r11
-
- movq %r9,%rcx
- shldq $1,%r8,%r9
- shldq $1,%rbx,%r8
-
- xorl %ebp,%ebp
- mulxq %rdx,%rax,%rdx
- adcxq %rax,%r15
- adcxq %rdx,%r8
- movq 40(%rsi),%rdx
- adcxq %rbp,%r9
-
- movq %r15,64(%rsp)
- movq %r8,72(%rsp)
-
-
-.byte 0xc4,0xe2,0xfb,0xf6,0x9e,0x30,0x00,0x00,0x00
- adoxq %rax,%r10
- adcxq %rbx,%r11
-
-.byte 0xc4,0x62,0xc3,0xf6,0xa6,0x38,0x00,0x00,0x00
- adoxq %rdi,%r11
- adcxq %rbp,%r12
- adoxq %rbp,%r12
-
- movq %r11,%rbx
- shldq $1,%r10,%r11
- shldq $1,%rcx,%r10
-
- xorl %ebp,%ebp
- mulxq %rdx,%rax,%rdx
- adcxq %rax,%r9
- adcxq %rdx,%r10
- movq 48(%rsi),%rdx
- adcxq %rbp,%r11
-
- movq %r9,80(%rsp)
- movq %r10,88(%rsp)
-
-
-.byte 0xc4,0x62,0xfb,0xf6,0xae,0x38,0x00,0x00,0x00
- adoxq %rax,%r12
- adoxq %rbp,%r13
-
- xorq %r14,%r14
- shldq $1,%r13,%r14
- shldq $1,%r12,%r13
- shldq $1,%rbx,%r12
-
- xorl %ebp,%ebp
- mulxq %rdx,%rax,%rdx
- adcxq %rax,%r11
- adcxq %rdx,%r12
- movq 56(%rsi),%rdx
- adcxq %rbp,%r13
-
-.byte 0x4c,0x89,0x9c,0x24,0x60,0x00,0x00,0x00
-.byte 0x4c,0x89,0xa4,0x24,0x68,0x00,0x00,0x00
-
-
- mulxq %rdx,%rax,%rdx
- adoxq %rax,%r13
- adoxq %rbp,%rdx
-
-.byte 0x66
- addq %rdx,%r14
-
- movq %r13,112(%rsp)
- movq %r14,120(%rsp)
-.byte 102,72,15,126,199
.byte 102,72,15,126,205
- movq 128(%rsp),%rdx
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
+ movq %rax,112(%rsp)
+ movq %rdx,120(%rsp)
- call __rsaz_512_reducex
+ call __rsaz_512_reduce
addq 64(%rsp),%r8
adcq 72(%rsp),%r9
@@ -669,9 +404,7 @@ rsaz_512_sqr:
movq %rdi,%rsi
decl %r8d
- jnz .Loop_sqrx
-
-.Lsqr_tail:
+ jnz .Loop_sqr
leaq 128+24+48(%rsp),%rax
.cfi_def_cfa %rax,8
@@ -723,10 +456,6 @@ rsaz_512_mul:
.byte 102,72,15,110,199
.byte 102,72,15,110,201
movq %r8,128(%rsp)
- movl $0x80100,%r11d
- andl OPENSSL_ia32cap_P+8(%rip),%r11d
- cmpl $0x80100,%r11d
- je .Lmulx
movq (%rdx),%rbx
movq %rdx,%rbp
call __rsaz_512_mul
@@ -744,29 +473,6 @@ rsaz_512_mul:
movq 56(%rsp),%r15
call __rsaz_512_reduce
- jmp .Lmul_tail
-
-.align 32
-.Lmulx:
- movq %rdx,%rbp
- movq (%rdx),%rdx
- call __rsaz_512_mulx
-
-.byte 102,72,15,126,199
-.byte 102,72,15,126,205
-
- movq 128(%rsp),%rdx
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
-
- call __rsaz_512_reducex
-.Lmul_tail:
addq 64(%rsp),%r8
adcq 72(%rsp),%r9
adcq 80(%rsp),%r10
@@ -880,10 +586,6 @@ rsaz_512_mul_gather4:
por %xmm9,%xmm8
pshufd $0x4e,%xmm8,%xmm9
por %xmm9,%xmm8
- movl $0x80100,%r11d
- andl OPENSSL_ia32cap_P+8(%rip),%r11d
- cmpl $0x80100,%r11d
- je .Lmulx_gather
.byte 102,76,15,126,195
movq %r8,128(%rsp)
@@ -1064,142 +766,6 @@ rsaz_512_mul_gather4:
movq 56(%rsp),%r15
call __rsaz_512_reduce
- jmp .Lmul_gather_tail
-
-.align 32
-.Lmulx_gather:
-.byte 102,76,15,126,194
-
- movq %r8,128(%rsp)
- movq %rdi,128+8(%rsp)
- movq %rcx,128+16(%rsp)
-
- mulxq (%rsi),%rbx,%r8
- movq %rbx,(%rsp)
- xorl %edi,%edi
-
- mulxq 8(%rsi),%rax,%r9
-
- mulxq 16(%rsi),%rbx,%r10
- adcxq %rax,%r8
-
- mulxq 24(%rsi),%rax,%r11
- adcxq %rbx,%r9
-
- mulxq 32(%rsi),%rbx,%r12
- adcxq %rax,%r10
-
- mulxq 40(%rsi),%rax,%r13
- adcxq %rbx,%r11
-
- mulxq 48(%rsi),%rbx,%r14
- adcxq %rax,%r12
-
- mulxq 56(%rsi),%rax,%r15
- adcxq %rbx,%r13
- adcxq %rax,%r14
-.byte 0x67
- movq %r8,%rbx
- adcxq %rdi,%r15
-
- movq $-7,%rcx
- jmp .Loop_mulx_gather
-
-.align 32
-.Loop_mulx_gather:
- movdqa 0(%rbp),%xmm8
- movdqa 16(%rbp),%xmm9
- movdqa 32(%rbp),%xmm10
- movdqa 48(%rbp),%xmm11
- pand %xmm0,%xmm8
- movdqa 64(%rbp),%xmm12
- pand %xmm1,%xmm9
- movdqa 80(%rbp),%xmm13
- pand %xmm2,%xmm10
- movdqa 96(%rbp),%xmm14
- pand %xmm3,%xmm11
- movdqa 112(%rbp),%xmm15
- leaq 128(%rbp),%rbp
- pand %xmm4,%xmm12
- pand %xmm5,%xmm13
- pand %xmm6,%xmm14
- pand %xmm7,%xmm15
- por %xmm10,%xmm8
- por %xmm11,%xmm9
- por %xmm12,%xmm8
- por %xmm13,%xmm9
- por %xmm14,%xmm8
- por %xmm15,%xmm9
-
- por %xmm9,%xmm8
- pshufd $0x4e,%xmm8,%xmm9
- por %xmm9,%xmm8
-.byte 102,76,15,126,194
-
-.byte 0xc4,0x62,0xfb,0xf6,0x86,0x00,0x00,0x00,0x00
- adcxq %rax,%rbx
- adoxq %r9,%r8
-
- mulxq 8(%rsi),%rax,%r9
- adcxq %rax,%r8
- adoxq %r10,%r9
-
- mulxq 16(%rsi),%rax,%r10
- adcxq %rax,%r9
- adoxq %r11,%r10
-
-.byte 0xc4,0x62,0xfb,0xf6,0x9e,0x18,0x00,0x00,0x00
- adcxq %rax,%r10
- adoxq %r12,%r11
-
- mulxq 32(%rsi),%rax,%r12
- adcxq %rax,%r11
- adoxq %r13,%r12
-
- mulxq 40(%rsi),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
-
-.byte 0xc4,0x62,0xfb,0xf6,0xb6,0x30,0x00,0x00,0x00
- adcxq %rax,%r13
-.byte 0x67
- adoxq %r15,%r14
-
- mulxq 56(%rsi),%rax,%r15
- movq %rbx,64(%rsp,%rcx,8)
- adcxq %rax,%r14
- adoxq %rdi,%r15
- movq %r8,%rbx
- adcxq %rdi,%r15
-
- incq %rcx
- jnz .Loop_mulx_gather
-
- movq %r8,64(%rsp)
- movq %r9,64+8(%rsp)
- movq %r10,64+16(%rsp)
- movq %r11,64+24(%rsp)
- movq %r12,64+32(%rsp)
- movq %r13,64+40(%rsp)
- movq %r14,64+48(%rsp)
- movq %r15,64+56(%rsp)
-
- movq 128(%rsp),%rdx
- movq 128+8(%rsp),%rdi
- movq 128+16(%rsp),%rbp
-
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
-
- call __rsaz_512_reducex
-
-.Lmul_gather_tail:
addq 64(%rsp),%r8
adcq 72(%rsp),%r9
adcq 80(%rsp),%r10
@@ -1267,10 +833,6 @@ rsaz_512_mul_scatter4:
movq %rcx,128(%rsp)
movq %rdi,%rbp
- movl $0x80100,%r11d
- andl OPENSSL_ia32cap_P+8(%rip),%r11d
- cmpl $0x80100,%r11d
- je .Lmulx_scatter
movq (%rdi),%rbx
call __rsaz_512_mul
@@ -1287,29 +849,6 @@ rsaz_512_mul_scatter4:
movq 56(%rsp),%r15
call __rsaz_512_reduce
- jmp .Lmul_scatter_tail
-
-.align 32
-.Lmulx_scatter:
- movq (%rdi),%rdx
- call __rsaz_512_mulx
-
-.byte 102,72,15,126,199
-.byte 102,72,15,126,205
-
- movq 128(%rsp),%rdx
- movq (%rsp),%r8
- movq 8(%rsp),%r9
- movq 16(%rsp),%r10
- movq 24(%rsp),%r11
- movq 32(%rsp),%r12
- movq 40(%rsp),%r13
- movq 48(%rsp),%r14
- movq 56(%rsp),%r15
-
- call __rsaz_512_reducex
-
-.Lmul_scatter_tail:
addq 64(%rsp),%r8
adcq 72(%rsp),%r9
adcq 80(%rsp),%r10
@@ -1379,7 +918,6 @@ rsaz_512_mul_by_one:
subq $128+24,%rsp
.cfi_adjust_cfa_offset 128+24
.Lmul_by_one_body:
- movl OPENSSL_ia32cap_P+8(%rip),%eax
movq %rdx,%rbp
movq %rcx,128(%rsp)
@@ -1400,16 +938,7 @@ rsaz_512_mul_by_one:
movdqa %xmm0,64(%rsp)
movdqa %xmm0,80(%rsp)
movdqa %xmm0,96(%rsp)
- andl $0x80100,%eax
- cmpl $0x80100,%eax
- je .Lby_one_callx
call __rsaz_512_reduce
- jmp .Lby_one_tail
-.align 32
-.Lby_one_callx:
- movq 128(%rsp),%rdx
- call __rsaz_512_reducex
-.Lby_one_tail:
movq %r8,(%rdi)
movq %r9,8(%rdi)
movq %r10,16(%rdi)
@@ -1442,6 +971,7 @@ rsaz_512_mul_by_one:
.type __rsaz_512_reduce,@function
.align 32
__rsaz_512_reduce:
+.cfi_startproc
movq %r8,%rbx
imulq 128+8(%rsp),%rbx
movq 0(%rbp),%rax
@@ -1521,66 +1051,12 @@ __rsaz_512_reduce:
jne .Lreduction_loop
.byte 0xf3,0xc3
+.cfi_endproc
.size __rsaz_512_reduce,.-__rsaz_512_reduce
-.type __rsaz_512_reducex,@function
-.align 32
-__rsaz_512_reducex:
-
- imulq %r8,%rdx
- xorq %rsi,%rsi
- movl $8,%ecx
- jmp .Lreduction_loopx
-
-.align 32
-.Lreduction_loopx:
- movq %r8,%rbx
- mulxq 0(%rbp),%rax,%r8
- adcxq %rbx,%rax
- adoxq %r9,%r8
-
- mulxq 8(%rbp),%rax,%r9
- adcxq %rax,%r8
- adoxq %r10,%r9
-
- mulxq 16(%rbp),%rbx,%r10
- adcxq %rbx,%r9
- adoxq %r11,%r10
-
- mulxq 24(%rbp),%rbx,%r11
- adcxq %rbx,%r10
- adoxq %r12,%r11
-
-.byte 0xc4,0x62,0xe3,0xf6,0xa5,0x20,0x00,0x00,0x00
- movq %rdx,%rax
- movq %r8,%rdx
- adcxq %rbx,%r11
- adoxq %r13,%r12
-
- mulxq 128+8(%rsp),%rbx,%rdx
- movq %rax,%rdx
-
- mulxq 40(%rbp),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
-
-.byte 0xc4,0x62,0xfb,0xf6,0xb5,0x30,0x00,0x00,0x00
- adcxq %rax,%r13
- adoxq %r15,%r14
-
- mulxq 56(%rbp),%rax,%r15
- movq %rbx,%rdx
- adcxq %rax,%r14
- adoxq %rsi,%r15
- adcxq %rsi,%r15
-
- decl %ecx
- jne .Lreduction_loopx
-
- .byte 0xf3,0xc3
-.size __rsaz_512_reducex,.-__rsaz_512_reducex
.type __rsaz_512_subtract,@function
.align 32
__rsaz_512_subtract:
+.cfi_startproc
movq %r8,(%rdi)
movq %r9,8(%rdi)
movq %r10,16(%rdi)
@@ -1634,10 +1110,12 @@ __rsaz_512_subtract:
movq %r15,56(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __rsaz_512_subtract,.-__rsaz_512_subtract
.type __rsaz_512_mul,@function
.align 32
__rsaz_512_mul:
+.cfi_startproc
leaq 8(%rsp),%rdi
movq (%rsi),%rax
@@ -1776,131 +1254,13 @@ __rsaz_512_mul:
movq %r15,56(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size __rsaz_512_mul,.-__rsaz_512_mul
-.type __rsaz_512_mulx,@function
-.align 32
-__rsaz_512_mulx:
- mulxq (%rsi),%rbx,%r8
- movq $-6,%rcx
-
- mulxq 8(%rsi),%rax,%r9
- movq %rbx,8(%rsp)
-
- mulxq 16(%rsi),%rbx,%r10
- adcq %rax,%r8
-
- mulxq 24(%rsi),%rax,%r11
- adcq %rbx,%r9
-
- mulxq 32(%rsi),%rbx,%r12
- adcq %rax,%r10
-
- mulxq 40(%rsi),%rax,%r13
- adcq %rbx,%r11
-
- mulxq 48(%rsi),%rbx,%r14
- adcq %rax,%r12
-
- mulxq 56(%rsi),%rax,%r15
- movq 8(%rbp),%rdx
- adcq %rbx,%r13
- adcq %rax,%r14
- adcq $0,%r15
-
- xorq %rdi,%rdi
- jmp .Loop_mulx
-
-.align 32
-.Loop_mulx:
- movq %r8,%rbx
- mulxq (%rsi),%rax,%r8
- adcxq %rax,%rbx
- adoxq %r9,%r8
-
- mulxq 8(%rsi),%rax,%r9
- adcxq %rax,%r8
- adoxq %r10,%r9
-
- mulxq 16(%rsi),%rax,%r10
- adcxq %rax,%r9
- adoxq %r11,%r10
-
- mulxq 24(%rsi),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
-
-.byte 0x3e,0xc4,0x62,0xfb,0xf6,0xa6,0x20,0x00,0x00,0x00
- adcxq %rax,%r11
- adoxq %r13,%r12
-
- mulxq 40(%rsi),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
-
- mulxq 48(%rsi),%rax,%r14
- adcxq %rax,%r13
- adoxq %r15,%r14
-
- mulxq 56(%rsi),%rax,%r15
- movq 64(%rbp,%rcx,8),%rdx
- movq %rbx,8+64-8(%rsp,%rcx,8)
- adcxq %rax,%r14
- adoxq %rdi,%r15
- adcxq %rdi,%r15
-
- incq %rcx
- jnz .Loop_mulx
-
- movq %r8,%rbx
- mulxq (%rsi),%rax,%r8
- adcxq %rax,%rbx
- adoxq %r9,%r8
-
-.byte 0xc4,0x62,0xfb,0xf6,0x8e,0x08,0x00,0x00,0x00
- adcxq %rax,%r8
- adoxq %r10,%r9
-
-.byte 0xc4,0x62,0xfb,0xf6,0x96,0x10,0x00,0x00,0x00
- adcxq %rax,%r9
- adoxq %r11,%r10
-
- mulxq 24(%rsi),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
-
- mulxq 32(%rsi),%rax,%r12
- adcxq %rax,%r11
- adoxq %r13,%r12
-
- mulxq 40(%rsi),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
-
-.byte 0xc4,0x62,0xfb,0xf6,0xb6,0x30,0x00,0x00,0x00
- adcxq %rax,%r13
- adoxq %r15,%r14
-
-.byte 0xc4,0x62,0xfb,0xf6,0xbe,0x38,0x00,0x00,0x00
- adcxq %rax,%r14
- adoxq %rdi,%r15
- adcxq %rdi,%r15
-
- movq %rbx,8+64-8(%rsp)
- movq %r8,8+64(%rsp)
- movq %r9,8+64+8(%rsp)
- movq %r10,8+64+16(%rsp)
- movq %r11,8+64+24(%rsp)
- movq %r12,8+64+32(%rsp)
- movq %r13,8+64+40(%rsp)
- movq %r14,8+64+48(%rsp)
- movq %r15,8+64+56(%rsp)
-
- .byte 0xf3,0xc3
-.size __rsaz_512_mulx,.-__rsaz_512_mulx
.globl rsaz_512_scatter4
.type rsaz_512_scatter4,@function
.align 16
rsaz_512_scatter4:
+.cfi_startproc
leaq (%rdi,%rdx,8),%rdi
movl $8,%r9d
jmp .Loop_scatter
@@ -1913,12 +1273,14 @@ rsaz_512_scatter4:
decl %r9d
jnz .Loop_scatter
.byte 0xf3,0xc3
+.cfi_endproc
.size rsaz_512_scatter4,.-rsaz_512_scatter4
.globl rsaz_512_gather4
.type rsaz_512_gather4,@function
.align 16
rsaz_512_gather4:
+.cfi_startproc
movd %edx,%xmm8
movdqa .Linc+16(%rip),%xmm1
movdqa .Linc(%rip),%xmm0
@@ -1982,6 +1344,7 @@ rsaz_512_gather4:
jnz .Loop_gather
.byte 0xf3,0xc3
.LSEH_end_rsaz_512_gather4:
+.cfi_endproc
.size rsaz_512_gather4,.-rsaz_512_gather4
.align 64
diff --git a/secure/lib/libcrypto/amd64/sha1-mb-x86_64.S b/secure/lib/libcrypto/amd64/sha1-mb-x86_64.S
index 0090e020c5732..488e554c247e3 100644
--- a/secure/lib/libcrypto/amd64/sha1-mb-x86_64.S
+++ b/secure/lib/libcrypto/amd64/sha1-mb-x86_64.S
@@ -12,8 +12,6 @@ sha1_multi_block:
movq OPENSSL_ia32cap_P+4(%rip),%rcx
btq $61,%rcx
jc _shaext_shortcut
- testl $268435456,%ecx
- jnz _avx_shortcut
movq %rsp,%rax
.cfi_def_cfa_register %rax
pushq %rbx
@@ -2939,4319 +2937,6 @@ _shaext_shortcut:
.byte 0xf3,0xc3
.cfi_endproc
.size sha1_multi_block_shaext,.-sha1_multi_block_shaext
-.type sha1_multi_block_avx,@function
-.align 32
-sha1_multi_block_avx:
-.cfi_startproc
-_avx_shortcut:
- shrq $32,%rcx
- cmpl $2,%edx
- jb .Lavx
- testl $32,%ecx
- jnz _avx2_shortcut
- jmp .Lavx
-.align 32
-.Lavx:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- subq $288,%rsp
- andq $-256,%rsp
- movq %rax,272(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0x90,0x02,0x06,0x23,0x08
-.Lbody_avx:
- leaq K_XX_XX(%rip),%rbp
- leaq 256(%rsp),%rbx
-
- vzeroupper
-.Loop_grande_avx:
- movl %edx,280(%rsp)
- xorl %edx,%edx
- movq 0(%rsi),%r8
- movl 8(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,0(%rbx)
- cmovleq %rbp,%r8
- movq 16(%rsi),%r9
- movl 24(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,4(%rbx)
- cmovleq %rbp,%r9
- movq 32(%rsi),%r10
- movl 40(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,8(%rbx)
- cmovleq %rbp,%r10
- movq 48(%rsi),%r11
- movl 56(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,12(%rbx)
- cmovleq %rbp,%r11
- testl %edx,%edx
- jz .Ldone_avx
-
- vmovdqu 0(%rdi),%xmm10
- leaq 128(%rsp),%rax
- vmovdqu 32(%rdi),%xmm11
- vmovdqu 64(%rdi),%xmm12
- vmovdqu 96(%rdi),%xmm13
- vmovdqu 128(%rdi),%xmm14
- vmovdqu 96(%rbp),%xmm5
- jmp .Loop_avx
-
-.align 32
-.Loop_avx:
- vmovdqa -32(%rbp),%xmm15
- vmovd (%r8),%xmm0
- leaq 64(%r8),%r8
- vmovd (%r9),%xmm2
- leaq 64(%r9),%r9
- vpinsrd $1,(%r10),%xmm0,%xmm0
- leaq 64(%r10),%r10
- vpinsrd $1,(%r11),%xmm2,%xmm2
- leaq 64(%r11),%r11
- vmovd -60(%r8),%xmm1
- vpunpckldq %xmm2,%xmm0,%xmm0
- vmovd -60(%r9),%xmm9
- vpshufb %xmm5,%xmm0,%xmm0
- vpinsrd $1,-60(%r10),%xmm1,%xmm1
- vpinsrd $1,-60(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm14,%xmm14
- vpslld $5,%xmm10,%xmm8
- vpandn %xmm13,%xmm11,%xmm7
- vpand %xmm12,%xmm11,%xmm6
-
- vmovdqa %xmm0,0-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpunpckldq %xmm9,%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -56(%r8),%xmm2
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -56(%r9),%xmm9
- vpaddd %xmm6,%xmm14,%xmm14
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpshufb %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpinsrd $1,-56(%r10),%xmm2,%xmm2
- vpinsrd $1,-56(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm13,%xmm13
- vpslld $5,%xmm14,%xmm8
- vpandn %xmm12,%xmm10,%xmm7
- vpand %xmm11,%xmm10,%xmm6
-
- vmovdqa %xmm1,16-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpunpckldq %xmm9,%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -52(%r8),%xmm3
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -52(%r9),%xmm9
- vpaddd %xmm6,%xmm13,%xmm13
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpshufb %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpinsrd $1,-52(%r10),%xmm3,%xmm3
- vpinsrd $1,-52(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm12,%xmm12
- vpslld $5,%xmm13,%xmm8
- vpandn %xmm11,%xmm14,%xmm7
- vpand %xmm10,%xmm14,%xmm6
-
- vmovdqa %xmm2,32-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpunpckldq %xmm9,%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -48(%r8),%xmm4
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -48(%r9),%xmm9
- vpaddd %xmm6,%xmm12,%xmm12
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpshufb %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpinsrd $1,-48(%r10),%xmm4,%xmm4
- vpinsrd $1,-48(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm11,%xmm11
- vpslld $5,%xmm12,%xmm8
- vpandn %xmm10,%xmm13,%xmm7
- vpand %xmm14,%xmm13,%xmm6
-
- vmovdqa %xmm3,48-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpunpckldq %xmm9,%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -44(%r8),%xmm0
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -44(%r9),%xmm9
- vpaddd %xmm6,%xmm11,%xmm11
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpshufb %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpinsrd $1,-44(%r10),%xmm0,%xmm0
- vpinsrd $1,-44(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm10,%xmm10
- vpslld $5,%xmm11,%xmm8
- vpandn %xmm14,%xmm12,%xmm7
- vpand %xmm13,%xmm12,%xmm6
-
- vmovdqa %xmm4,64-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpunpckldq %xmm9,%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -40(%r8),%xmm1
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -40(%r9),%xmm9
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpshufb %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpinsrd $1,-40(%r10),%xmm1,%xmm1
- vpinsrd $1,-40(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm14,%xmm14
- vpslld $5,%xmm10,%xmm8
- vpandn %xmm13,%xmm11,%xmm7
- vpand %xmm12,%xmm11,%xmm6
-
- vmovdqa %xmm0,80-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpunpckldq %xmm9,%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -36(%r8),%xmm2
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -36(%r9),%xmm9
- vpaddd %xmm6,%xmm14,%xmm14
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpshufb %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpinsrd $1,-36(%r10),%xmm2,%xmm2
- vpinsrd $1,-36(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm13,%xmm13
- vpslld $5,%xmm14,%xmm8
- vpandn %xmm12,%xmm10,%xmm7
- vpand %xmm11,%xmm10,%xmm6
-
- vmovdqa %xmm1,96-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpunpckldq %xmm9,%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -32(%r8),%xmm3
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -32(%r9),%xmm9
- vpaddd %xmm6,%xmm13,%xmm13
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpshufb %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpinsrd $1,-32(%r10),%xmm3,%xmm3
- vpinsrd $1,-32(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm12,%xmm12
- vpslld $5,%xmm13,%xmm8
- vpandn %xmm11,%xmm14,%xmm7
- vpand %xmm10,%xmm14,%xmm6
-
- vmovdqa %xmm2,112-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpunpckldq %xmm9,%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -28(%r8),%xmm4
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -28(%r9),%xmm9
- vpaddd %xmm6,%xmm12,%xmm12
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpshufb %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpinsrd $1,-28(%r10),%xmm4,%xmm4
- vpinsrd $1,-28(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm11,%xmm11
- vpslld $5,%xmm12,%xmm8
- vpandn %xmm10,%xmm13,%xmm7
- vpand %xmm14,%xmm13,%xmm6
-
- vmovdqa %xmm3,128-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpunpckldq %xmm9,%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -24(%r8),%xmm0
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -24(%r9),%xmm9
- vpaddd %xmm6,%xmm11,%xmm11
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpshufb %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpinsrd $1,-24(%r10),%xmm0,%xmm0
- vpinsrd $1,-24(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm10,%xmm10
- vpslld $5,%xmm11,%xmm8
- vpandn %xmm14,%xmm12,%xmm7
- vpand %xmm13,%xmm12,%xmm6
-
- vmovdqa %xmm4,144-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpunpckldq %xmm9,%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -20(%r8),%xmm1
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -20(%r9),%xmm9
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpshufb %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpinsrd $1,-20(%r10),%xmm1,%xmm1
- vpinsrd $1,-20(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm14,%xmm14
- vpslld $5,%xmm10,%xmm8
- vpandn %xmm13,%xmm11,%xmm7
- vpand %xmm12,%xmm11,%xmm6
-
- vmovdqa %xmm0,160-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpunpckldq %xmm9,%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -16(%r8),%xmm2
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -16(%r9),%xmm9
- vpaddd %xmm6,%xmm14,%xmm14
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpshufb %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpinsrd $1,-16(%r10),%xmm2,%xmm2
- vpinsrd $1,-16(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm13,%xmm13
- vpslld $5,%xmm14,%xmm8
- vpandn %xmm12,%xmm10,%xmm7
- vpand %xmm11,%xmm10,%xmm6
-
- vmovdqa %xmm1,176-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpunpckldq %xmm9,%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -12(%r8),%xmm3
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -12(%r9),%xmm9
- vpaddd %xmm6,%xmm13,%xmm13
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpshufb %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpinsrd $1,-12(%r10),%xmm3,%xmm3
- vpinsrd $1,-12(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm12,%xmm12
- vpslld $5,%xmm13,%xmm8
- vpandn %xmm11,%xmm14,%xmm7
- vpand %xmm10,%xmm14,%xmm6
-
- vmovdqa %xmm2,192-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpunpckldq %xmm9,%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -8(%r8),%xmm4
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -8(%r9),%xmm9
- vpaddd %xmm6,%xmm12,%xmm12
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpshufb %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpinsrd $1,-8(%r10),%xmm4,%xmm4
- vpinsrd $1,-8(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm11,%xmm11
- vpslld $5,%xmm12,%xmm8
- vpandn %xmm10,%xmm13,%xmm7
- vpand %xmm14,%xmm13,%xmm6
-
- vmovdqa %xmm3,208-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpunpckldq %xmm9,%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vmovd -4(%r8),%xmm0
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vmovd -4(%r9),%xmm9
- vpaddd %xmm6,%xmm11,%xmm11
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpshufb %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vmovdqa 0-128(%rax),%xmm1
- vpinsrd $1,-4(%r10),%xmm0,%xmm0
- vpinsrd $1,-4(%r11),%xmm9,%xmm9
- vpaddd %xmm15,%xmm10,%xmm10
- prefetcht0 63(%r8)
- vpslld $5,%xmm11,%xmm8
- vpandn %xmm14,%xmm12,%xmm7
- vpand %xmm13,%xmm12,%xmm6
-
- vmovdqa %xmm4,224-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpunpckldq %xmm9,%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- prefetcht0 63(%r9)
- vpxor %xmm7,%xmm6,%xmm6
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- prefetcht0 63(%r10)
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- prefetcht0 63(%r11)
- vpshufb %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vmovdqa 16-128(%rax),%xmm2
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 32-128(%rax),%xmm3
-
- vpaddd %xmm15,%xmm14,%xmm14
- vpslld $5,%xmm10,%xmm8
- vpandn %xmm13,%xmm11,%xmm7
-
- vpand %xmm12,%xmm11,%xmm6
-
- vmovdqa %xmm0,240-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpxor 128-128(%rax),%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm14,%xmm14
-
- vpsrld $31,%xmm1,%xmm5
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpsrld $2,%xmm11,%xmm11
-
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 48-128(%rax),%xmm4
-
- vpaddd %xmm15,%xmm13,%xmm13
- vpslld $5,%xmm14,%xmm8
- vpandn %xmm12,%xmm10,%xmm7
-
- vpand %xmm11,%xmm10,%xmm6
-
- vmovdqa %xmm1,0-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpxor 144-128(%rax),%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm13,%xmm13
-
- vpsrld $31,%xmm2,%xmm5
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpsrld $2,%xmm10,%xmm10
-
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 64-128(%rax),%xmm0
-
- vpaddd %xmm15,%xmm12,%xmm12
- vpslld $5,%xmm13,%xmm8
- vpandn %xmm11,%xmm14,%xmm7
-
- vpand %xmm10,%xmm14,%xmm6
-
- vmovdqa %xmm2,16-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpxor 160-128(%rax),%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
-
- vpsrld $31,%xmm3,%xmm5
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpsrld $2,%xmm14,%xmm14
-
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 80-128(%rax),%xmm1
-
- vpaddd %xmm15,%xmm11,%xmm11
- vpslld $5,%xmm12,%xmm8
- vpandn %xmm10,%xmm13,%xmm7
-
- vpand %xmm14,%xmm13,%xmm6
-
- vmovdqa %xmm3,32-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor 176-128(%rax),%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm11,%xmm11
-
- vpsrld $31,%xmm4,%xmm5
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpsrld $2,%xmm13,%xmm13
-
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 96-128(%rax),%xmm2
-
- vpaddd %xmm15,%xmm10,%xmm10
- vpslld $5,%xmm11,%xmm8
- vpandn %xmm14,%xmm12,%xmm7
-
- vpand %xmm13,%xmm12,%xmm6
-
- vmovdqa %xmm4,48-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpxor 192-128(%rax),%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm7,%xmm6,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpsrld $31,%xmm0,%xmm5
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpsrld $2,%xmm12,%xmm12
-
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vmovdqa 0(%rbp),%xmm15
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 112-128(%rax),%xmm3
-
- vpslld $5,%xmm10,%xmm8
- vpaddd %xmm15,%xmm14,%xmm14
- vpxor %xmm11,%xmm13,%xmm6
- vmovdqa %xmm0,64-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpxor 208-128(%rax),%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm6,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm14,%xmm14
- vpsrld $31,%xmm1,%xmm5
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 128-128(%rax),%xmm4
-
- vpslld $5,%xmm14,%xmm8
- vpaddd %xmm15,%xmm13,%xmm13
- vpxor %xmm10,%xmm12,%xmm6
- vmovdqa %xmm1,80-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpxor 224-128(%rax),%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm6,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm13,%xmm13
- vpsrld $31,%xmm2,%xmm5
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 144-128(%rax),%xmm0
-
- vpslld $5,%xmm13,%xmm8
- vpaddd %xmm15,%xmm12,%xmm12
- vpxor %xmm14,%xmm11,%xmm6
- vmovdqa %xmm2,96-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpxor 240-128(%rax),%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm6,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
- vpsrld $31,%xmm3,%xmm5
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 160-128(%rax),%xmm1
-
- vpslld $5,%xmm12,%xmm8
- vpaddd %xmm15,%xmm11,%xmm11
- vpxor %xmm13,%xmm10,%xmm6
- vmovdqa %xmm3,112-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor 0-128(%rax),%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm11,%xmm11
- vpsrld $31,%xmm4,%xmm5
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 176-128(%rax),%xmm2
-
- vpslld $5,%xmm11,%xmm8
- vpaddd %xmm15,%xmm10,%xmm10
- vpxor %xmm12,%xmm14,%xmm6
- vmovdqa %xmm4,128-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpxor 16-128(%rax),%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm6,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm10,%xmm10
- vpsrld $31,%xmm0,%xmm5
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 192-128(%rax),%xmm3
-
- vpslld $5,%xmm10,%xmm8
- vpaddd %xmm15,%xmm14,%xmm14
- vpxor %xmm11,%xmm13,%xmm6
- vmovdqa %xmm0,144-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpxor 32-128(%rax),%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm6,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm14,%xmm14
- vpsrld $31,%xmm1,%xmm5
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 208-128(%rax),%xmm4
-
- vpslld $5,%xmm14,%xmm8
- vpaddd %xmm15,%xmm13,%xmm13
- vpxor %xmm10,%xmm12,%xmm6
- vmovdqa %xmm1,160-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpxor 48-128(%rax),%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm6,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm13,%xmm13
- vpsrld $31,%xmm2,%xmm5
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 224-128(%rax),%xmm0
-
- vpslld $5,%xmm13,%xmm8
- vpaddd %xmm15,%xmm12,%xmm12
- vpxor %xmm14,%xmm11,%xmm6
- vmovdqa %xmm2,176-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpxor 64-128(%rax),%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm6,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
- vpsrld $31,%xmm3,%xmm5
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 240-128(%rax),%xmm1
-
- vpslld $5,%xmm12,%xmm8
- vpaddd %xmm15,%xmm11,%xmm11
- vpxor %xmm13,%xmm10,%xmm6
- vmovdqa %xmm3,192-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor 80-128(%rax),%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm11,%xmm11
- vpsrld $31,%xmm4,%xmm5
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 0-128(%rax),%xmm2
-
- vpslld $5,%xmm11,%xmm8
- vpaddd %xmm15,%xmm10,%xmm10
- vpxor %xmm12,%xmm14,%xmm6
- vmovdqa %xmm4,208-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpxor 96-128(%rax),%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm6,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm10,%xmm10
- vpsrld $31,%xmm0,%xmm5
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 16-128(%rax),%xmm3
-
- vpslld $5,%xmm10,%xmm8
- vpaddd %xmm15,%xmm14,%xmm14
- vpxor %xmm11,%xmm13,%xmm6
- vmovdqa %xmm0,224-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpxor 112-128(%rax),%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm6,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm14,%xmm14
- vpsrld $31,%xmm1,%xmm5
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 32-128(%rax),%xmm4
-
- vpslld $5,%xmm14,%xmm8
- vpaddd %xmm15,%xmm13,%xmm13
- vpxor %xmm10,%xmm12,%xmm6
- vmovdqa %xmm1,240-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpxor 128-128(%rax),%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm6,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm13,%xmm13
- vpsrld $31,%xmm2,%xmm5
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 48-128(%rax),%xmm0
-
- vpslld $5,%xmm13,%xmm8
- vpaddd %xmm15,%xmm12,%xmm12
- vpxor %xmm14,%xmm11,%xmm6
- vmovdqa %xmm2,0-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpxor 144-128(%rax),%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm6,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
- vpsrld $31,%xmm3,%xmm5
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 64-128(%rax),%xmm1
-
- vpslld $5,%xmm12,%xmm8
- vpaddd %xmm15,%xmm11,%xmm11
- vpxor %xmm13,%xmm10,%xmm6
- vmovdqa %xmm3,16-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor 160-128(%rax),%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm11,%xmm11
- vpsrld $31,%xmm4,%xmm5
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 80-128(%rax),%xmm2
-
- vpslld $5,%xmm11,%xmm8
- vpaddd %xmm15,%xmm10,%xmm10
- vpxor %xmm12,%xmm14,%xmm6
- vmovdqa %xmm4,32-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpxor 176-128(%rax),%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm6,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm10,%xmm10
- vpsrld $31,%xmm0,%xmm5
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 96-128(%rax),%xmm3
-
- vpslld $5,%xmm10,%xmm8
- vpaddd %xmm15,%xmm14,%xmm14
- vpxor %xmm11,%xmm13,%xmm6
- vmovdqa %xmm0,48-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpxor 192-128(%rax),%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm6,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm14,%xmm14
- vpsrld $31,%xmm1,%xmm5
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 112-128(%rax),%xmm4
-
- vpslld $5,%xmm14,%xmm8
- vpaddd %xmm15,%xmm13,%xmm13
- vpxor %xmm10,%xmm12,%xmm6
- vmovdqa %xmm1,64-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpxor 208-128(%rax),%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm6,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm13,%xmm13
- vpsrld $31,%xmm2,%xmm5
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 128-128(%rax),%xmm0
-
- vpslld $5,%xmm13,%xmm8
- vpaddd %xmm15,%xmm12,%xmm12
- vpxor %xmm14,%xmm11,%xmm6
- vmovdqa %xmm2,80-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpxor 224-128(%rax),%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm6,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
- vpsrld $31,%xmm3,%xmm5
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 144-128(%rax),%xmm1
-
- vpslld $5,%xmm12,%xmm8
- vpaddd %xmm15,%xmm11,%xmm11
- vpxor %xmm13,%xmm10,%xmm6
- vmovdqa %xmm3,96-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor 240-128(%rax),%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm11,%xmm11
- vpsrld $31,%xmm4,%xmm5
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 160-128(%rax),%xmm2
-
- vpslld $5,%xmm11,%xmm8
- vpaddd %xmm15,%xmm10,%xmm10
- vpxor %xmm12,%xmm14,%xmm6
- vmovdqa %xmm4,112-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpxor 0-128(%rax),%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm6,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm10,%xmm10
- vpsrld $31,%xmm0,%xmm5
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vmovdqa 32(%rbp),%xmm15
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 176-128(%rax),%xmm3
-
- vpaddd %xmm15,%xmm14,%xmm14
- vpslld $5,%xmm10,%xmm8
- vpand %xmm12,%xmm13,%xmm7
- vpxor 16-128(%rax),%xmm1,%xmm1
-
- vpaddd %xmm7,%xmm14,%xmm14
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm13,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vmovdqu %xmm0,128-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm1,%xmm5
- vpand %xmm11,%xmm6,%xmm6
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpaddd %xmm6,%xmm14,%xmm14
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 192-128(%rax),%xmm4
-
- vpaddd %xmm15,%xmm13,%xmm13
- vpslld $5,%xmm14,%xmm8
- vpand %xmm11,%xmm12,%xmm7
- vpxor 32-128(%rax),%xmm2,%xmm2
-
- vpaddd %xmm7,%xmm13,%xmm13
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm12,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vmovdqu %xmm1,144-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm2,%xmm5
- vpand %xmm10,%xmm6,%xmm6
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpaddd %xmm6,%xmm13,%xmm13
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 208-128(%rax),%xmm0
-
- vpaddd %xmm15,%xmm12,%xmm12
- vpslld $5,%xmm13,%xmm8
- vpand %xmm10,%xmm11,%xmm7
- vpxor 48-128(%rax),%xmm3,%xmm3
-
- vpaddd %xmm7,%xmm12,%xmm12
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm11,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vmovdqu %xmm2,160-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm3,%xmm5
- vpand %xmm14,%xmm6,%xmm6
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpaddd %xmm6,%xmm12,%xmm12
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 224-128(%rax),%xmm1
-
- vpaddd %xmm15,%xmm11,%xmm11
- vpslld $5,%xmm12,%xmm8
- vpand %xmm14,%xmm10,%xmm7
- vpxor 64-128(%rax),%xmm4,%xmm4
-
- vpaddd %xmm7,%xmm11,%xmm11
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm10,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vmovdqu %xmm3,176-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm4,%xmm5
- vpand %xmm13,%xmm6,%xmm6
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpaddd %xmm6,%xmm11,%xmm11
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 240-128(%rax),%xmm2
-
- vpaddd %xmm15,%xmm10,%xmm10
- vpslld $5,%xmm11,%xmm8
- vpand %xmm13,%xmm14,%xmm7
- vpxor 80-128(%rax),%xmm0,%xmm0
-
- vpaddd %xmm7,%xmm10,%xmm10
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm14,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vmovdqu %xmm4,192-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm0,%xmm5
- vpand %xmm12,%xmm6,%xmm6
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 0-128(%rax),%xmm3
-
- vpaddd %xmm15,%xmm14,%xmm14
- vpslld $5,%xmm10,%xmm8
- vpand %xmm12,%xmm13,%xmm7
- vpxor 96-128(%rax),%xmm1,%xmm1
-
- vpaddd %xmm7,%xmm14,%xmm14
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm13,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vmovdqu %xmm0,208-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm1,%xmm5
- vpand %xmm11,%xmm6,%xmm6
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpaddd %xmm6,%xmm14,%xmm14
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 16-128(%rax),%xmm4
-
- vpaddd %xmm15,%xmm13,%xmm13
- vpslld $5,%xmm14,%xmm8
- vpand %xmm11,%xmm12,%xmm7
- vpxor 112-128(%rax),%xmm2,%xmm2
-
- vpaddd %xmm7,%xmm13,%xmm13
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm12,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vmovdqu %xmm1,224-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm2,%xmm5
- vpand %xmm10,%xmm6,%xmm6
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpaddd %xmm6,%xmm13,%xmm13
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 32-128(%rax),%xmm0
-
- vpaddd %xmm15,%xmm12,%xmm12
- vpslld $5,%xmm13,%xmm8
- vpand %xmm10,%xmm11,%xmm7
- vpxor 128-128(%rax),%xmm3,%xmm3
-
- vpaddd %xmm7,%xmm12,%xmm12
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm11,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vmovdqu %xmm2,240-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm3,%xmm5
- vpand %xmm14,%xmm6,%xmm6
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpaddd %xmm6,%xmm12,%xmm12
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 48-128(%rax),%xmm1
-
- vpaddd %xmm15,%xmm11,%xmm11
- vpslld $5,%xmm12,%xmm8
- vpand %xmm14,%xmm10,%xmm7
- vpxor 144-128(%rax),%xmm4,%xmm4
-
- vpaddd %xmm7,%xmm11,%xmm11
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm10,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vmovdqu %xmm3,0-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm4,%xmm5
- vpand %xmm13,%xmm6,%xmm6
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpaddd %xmm6,%xmm11,%xmm11
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 64-128(%rax),%xmm2
-
- vpaddd %xmm15,%xmm10,%xmm10
- vpslld $5,%xmm11,%xmm8
- vpand %xmm13,%xmm14,%xmm7
- vpxor 160-128(%rax),%xmm0,%xmm0
-
- vpaddd %xmm7,%xmm10,%xmm10
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm14,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vmovdqu %xmm4,16-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm0,%xmm5
- vpand %xmm12,%xmm6,%xmm6
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 80-128(%rax),%xmm3
-
- vpaddd %xmm15,%xmm14,%xmm14
- vpslld $5,%xmm10,%xmm8
- vpand %xmm12,%xmm13,%xmm7
- vpxor 176-128(%rax),%xmm1,%xmm1
-
- vpaddd %xmm7,%xmm14,%xmm14
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm13,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vmovdqu %xmm0,32-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm1,%xmm5
- vpand %xmm11,%xmm6,%xmm6
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpaddd %xmm6,%xmm14,%xmm14
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 96-128(%rax),%xmm4
-
- vpaddd %xmm15,%xmm13,%xmm13
- vpslld $5,%xmm14,%xmm8
- vpand %xmm11,%xmm12,%xmm7
- vpxor 192-128(%rax),%xmm2,%xmm2
-
- vpaddd %xmm7,%xmm13,%xmm13
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm12,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vmovdqu %xmm1,48-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm2,%xmm5
- vpand %xmm10,%xmm6,%xmm6
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpaddd %xmm6,%xmm13,%xmm13
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 112-128(%rax),%xmm0
-
- vpaddd %xmm15,%xmm12,%xmm12
- vpslld $5,%xmm13,%xmm8
- vpand %xmm10,%xmm11,%xmm7
- vpxor 208-128(%rax),%xmm3,%xmm3
-
- vpaddd %xmm7,%xmm12,%xmm12
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm11,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vmovdqu %xmm2,64-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm3,%xmm5
- vpand %xmm14,%xmm6,%xmm6
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpaddd %xmm6,%xmm12,%xmm12
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 128-128(%rax),%xmm1
-
- vpaddd %xmm15,%xmm11,%xmm11
- vpslld $5,%xmm12,%xmm8
- vpand %xmm14,%xmm10,%xmm7
- vpxor 224-128(%rax),%xmm4,%xmm4
-
- vpaddd %xmm7,%xmm11,%xmm11
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm10,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vmovdqu %xmm3,80-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm4,%xmm5
- vpand %xmm13,%xmm6,%xmm6
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpaddd %xmm6,%xmm11,%xmm11
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 144-128(%rax),%xmm2
-
- vpaddd %xmm15,%xmm10,%xmm10
- vpslld $5,%xmm11,%xmm8
- vpand %xmm13,%xmm14,%xmm7
- vpxor 240-128(%rax),%xmm0,%xmm0
-
- vpaddd %xmm7,%xmm10,%xmm10
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm14,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vmovdqu %xmm4,96-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm0,%xmm5
- vpand %xmm12,%xmm6,%xmm6
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 160-128(%rax),%xmm3
-
- vpaddd %xmm15,%xmm14,%xmm14
- vpslld $5,%xmm10,%xmm8
- vpand %xmm12,%xmm13,%xmm7
- vpxor 0-128(%rax),%xmm1,%xmm1
-
- vpaddd %xmm7,%xmm14,%xmm14
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm13,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vmovdqu %xmm0,112-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm1,%xmm5
- vpand %xmm11,%xmm6,%xmm6
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpaddd %xmm6,%xmm14,%xmm14
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 176-128(%rax),%xmm4
-
- vpaddd %xmm15,%xmm13,%xmm13
- vpslld $5,%xmm14,%xmm8
- vpand %xmm11,%xmm12,%xmm7
- vpxor 16-128(%rax),%xmm2,%xmm2
-
- vpaddd %xmm7,%xmm13,%xmm13
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm12,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vmovdqu %xmm1,128-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm2,%xmm5
- vpand %xmm10,%xmm6,%xmm6
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpaddd %xmm6,%xmm13,%xmm13
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 192-128(%rax),%xmm0
-
- vpaddd %xmm15,%xmm12,%xmm12
- vpslld $5,%xmm13,%xmm8
- vpand %xmm10,%xmm11,%xmm7
- vpxor 32-128(%rax),%xmm3,%xmm3
-
- vpaddd %xmm7,%xmm12,%xmm12
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm11,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vmovdqu %xmm2,144-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm3,%xmm5
- vpand %xmm14,%xmm6,%xmm6
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpaddd %xmm6,%xmm12,%xmm12
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 208-128(%rax),%xmm1
-
- vpaddd %xmm15,%xmm11,%xmm11
- vpslld $5,%xmm12,%xmm8
- vpand %xmm14,%xmm10,%xmm7
- vpxor 48-128(%rax),%xmm4,%xmm4
-
- vpaddd %xmm7,%xmm11,%xmm11
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm10,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vmovdqu %xmm3,160-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm4,%xmm5
- vpand %xmm13,%xmm6,%xmm6
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpaddd %xmm6,%xmm11,%xmm11
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 224-128(%rax),%xmm2
-
- vpaddd %xmm15,%xmm10,%xmm10
- vpslld $5,%xmm11,%xmm8
- vpand %xmm13,%xmm14,%xmm7
- vpxor 64-128(%rax),%xmm0,%xmm0
-
- vpaddd %xmm7,%xmm10,%xmm10
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm14,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vmovdqu %xmm4,176-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpor %xmm9,%xmm8,%xmm8
- vpsrld $31,%xmm0,%xmm5
- vpand %xmm12,%xmm6,%xmm6
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vmovdqa 64(%rbp),%xmm15
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 240-128(%rax),%xmm3
-
- vpslld $5,%xmm10,%xmm8
- vpaddd %xmm15,%xmm14,%xmm14
- vpxor %xmm11,%xmm13,%xmm6
- vmovdqa %xmm0,192-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpxor 80-128(%rax),%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm6,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm14,%xmm14
- vpsrld $31,%xmm1,%xmm5
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 0-128(%rax),%xmm4
-
- vpslld $5,%xmm14,%xmm8
- vpaddd %xmm15,%xmm13,%xmm13
- vpxor %xmm10,%xmm12,%xmm6
- vmovdqa %xmm1,208-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpxor 96-128(%rax),%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm6,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm13,%xmm13
- vpsrld $31,%xmm2,%xmm5
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 16-128(%rax),%xmm0
-
- vpslld $5,%xmm13,%xmm8
- vpaddd %xmm15,%xmm12,%xmm12
- vpxor %xmm14,%xmm11,%xmm6
- vmovdqa %xmm2,224-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpxor 112-128(%rax),%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm6,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
- vpsrld $31,%xmm3,%xmm5
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 32-128(%rax),%xmm1
-
- vpslld $5,%xmm12,%xmm8
- vpaddd %xmm15,%xmm11,%xmm11
- vpxor %xmm13,%xmm10,%xmm6
- vmovdqa %xmm3,240-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor 128-128(%rax),%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm11,%xmm11
- vpsrld $31,%xmm4,%xmm5
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 48-128(%rax),%xmm2
-
- vpslld $5,%xmm11,%xmm8
- vpaddd %xmm15,%xmm10,%xmm10
- vpxor %xmm12,%xmm14,%xmm6
- vmovdqa %xmm4,0-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpxor 144-128(%rax),%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm6,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm10,%xmm10
- vpsrld $31,%xmm0,%xmm5
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 64-128(%rax),%xmm3
-
- vpslld $5,%xmm10,%xmm8
- vpaddd %xmm15,%xmm14,%xmm14
- vpxor %xmm11,%xmm13,%xmm6
- vmovdqa %xmm0,16-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpxor 160-128(%rax),%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm6,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm14,%xmm14
- vpsrld $31,%xmm1,%xmm5
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 80-128(%rax),%xmm4
-
- vpslld $5,%xmm14,%xmm8
- vpaddd %xmm15,%xmm13,%xmm13
- vpxor %xmm10,%xmm12,%xmm6
- vmovdqa %xmm1,32-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpxor 176-128(%rax),%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm6,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm13,%xmm13
- vpsrld $31,%xmm2,%xmm5
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 96-128(%rax),%xmm0
-
- vpslld $5,%xmm13,%xmm8
- vpaddd %xmm15,%xmm12,%xmm12
- vpxor %xmm14,%xmm11,%xmm6
- vmovdqa %xmm2,48-128(%rax)
- vpaddd %xmm2,%xmm12,%xmm12
- vpxor 192-128(%rax),%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm6,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
- vpsrld $31,%xmm3,%xmm5
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 112-128(%rax),%xmm1
-
- vpslld $5,%xmm12,%xmm8
- vpaddd %xmm15,%xmm11,%xmm11
- vpxor %xmm13,%xmm10,%xmm6
- vmovdqa %xmm3,64-128(%rax)
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor 208-128(%rax),%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm11,%xmm11
- vpsrld $31,%xmm4,%xmm5
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 128-128(%rax),%xmm2
-
- vpslld $5,%xmm11,%xmm8
- vpaddd %xmm15,%xmm10,%xmm10
- vpxor %xmm12,%xmm14,%xmm6
- vmovdqa %xmm4,80-128(%rax)
- vpaddd %xmm4,%xmm10,%xmm10
- vpxor 224-128(%rax),%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm6,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm10,%xmm10
- vpsrld $31,%xmm0,%xmm5
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 144-128(%rax),%xmm3
-
- vpslld $5,%xmm10,%xmm8
- vpaddd %xmm15,%xmm14,%xmm14
- vpxor %xmm11,%xmm13,%xmm6
- vmovdqa %xmm0,96-128(%rax)
- vpaddd %xmm0,%xmm14,%xmm14
- vpxor 240-128(%rax),%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm6,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm14,%xmm14
- vpsrld $31,%xmm1,%xmm5
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 160-128(%rax),%xmm4
-
- vpslld $5,%xmm14,%xmm8
- vpaddd %xmm15,%xmm13,%xmm13
- vpxor %xmm10,%xmm12,%xmm6
- vmovdqa %xmm1,112-128(%rax)
- vpaddd %xmm1,%xmm13,%xmm13
- vpxor 0-128(%rax),%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm6,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm13,%xmm13
- vpsrld $31,%xmm2,%xmm5
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 176-128(%rax),%xmm0
-
- vpslld $5,%xmm13,%xmm8
- vpaddd %xmm15,%xmm12,%xmm12
- vpxor %xmm14,%xmm11,%xmm6
- vpaddd %xmm2,%xmm12,%xmm12
- vpxor 16-128(%rax),%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm6,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
- vpsrld $31,%xmm3,%xmm5
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 192-128(%rax),%xmm1
-
- vpslld $5,%xmm12,%xmm8
- vpaddd %xmm15,%xmm11,%xmm11
- vpxor %xmm13,%xmm10,%xmm6
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor 32-128(%rax),%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm11,%xmm11
- vpsrld $31,%xmm4,%xmm5
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpxor %xmm2,%xmm0,%xmm0
- vmovdqa 208-128(%rax),%xmm2
-
- vpslld $5,%xmm11,%xmm8
- vpaddd %xmm15,%xmm10,%xmm10
- vpxor %xmm12,%xmm14,%xmm6
- vpaddd %xmm4,%xmm10,%xmm10
- vpxor 48-128(%rax),%xmm0,%xmm0
- vpsrld $27,%xmm11,%xmm9
- vpxor %xmm13,%xmm6,%xmm6
- vpxor %xmm2,%xmm0,%xmm0
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm10,%xmm10
- vpsrld $31,%xmm0,%xmm5
- vpaddd %xmm0,%xmm0,%xmm0
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm5,%xmm0,%xmm0
- vpor %xmm7,%xmm12,%xmm12
- vpxor %xmm3,%xmm1,%xmm1
- vmovdqa 224-128(%rax),%xmm3
-
- vpslld $5,%xmm10,%xmm8
- vpaddd %xmm15,%xmm14,%xmm14
- vpxor %xmm11,%xmm13,%xmm6
- vpaddd %xmm0,%xmm14,%xmm14
- vpxor 64-128(%rax),%xmm1,%xmm1
- vpsrld $27,%xmm10,%xmm9
- vpxor %xmm12,%xmm6,%xmm6
- vpxor %xmm3,%xmm1,%xmm1
-
- vpslld $30,%xmm11,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm14,%xmm14
- vpsrld $31,%xmm1,%xmm5
- vpaddd %xmm1,%xmm1,%xmm1
-
- vpsrld $2,%xmm11,%xmm11
- vpaddd %xmm8,%xmm14,%xmm14
- vpor %xmm5,%xmm1,%xmm1
- vpor %xmm7,%xmm11,%xmm11
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa 240-128(%rax),%xmm4
-
- vpslld $5,%xmm14,%xmm8
- vpaddd %xmm15,%xmm13,%xmm13
- vpxor %xmm10,%xmm12,%xmm6
- vpaddd %xmm1,%xmm13,%xmm13
- vpxor 80-128(%rax),%xmm2,%xmm2
- vpsrld $27,%xmm14,%xmm9
- vpxor %xmm11,%xmm6,%xmm6
- vpxor %xmm4,%xmm2,%xmm2
-
- vpslld $30,%xmm10,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm13,%xmm13
- vpsrld $31,%xmm2,%xmm5
- vpaddd %xmm2,%xmm2,%xmm2
-
- vpsrld $2,%xmm10,%xmm10
- vpaddd %xmm8,%xmm13,%xmm13
- vpor %xmm5,%xmm2,%xmm2
- vpor %xmm7,%xmm10,%xmm10
- vpxor %xmm0,%xmm3,%xmm3
- vmovdqa 0-128(%rax),%xmm0
-
- vpslld $5,%xmm13,%xmm8
- vpaddd %xmm15,%xmm12,%xmm12
- vpxor %xmm14,%xmm11,%xmm6
- vpaddd %xmm2,%xmm12,%xmm12
- vpxor 96-128(%rax),%xmm3,%xmm3
- vpsrld $27,%xmm13,%xmm9
- vpxor %xmm10,%xmm6,%xmm6
- vpxor %xmm0,%xmm3,%xmm3
-
- vpslld $30,%xmm14,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
- vpsrld $31,%xmm3,%xmm5
- vpaddd %xmm3,%xmm3,%xmm3
-
- vpsrld $2,%xmm14,%xmm14
- vpaddd %xmm8,%xmm12,%xmm12
- vpor %xmm5,%xmm3,%xmm3
- vpor %xmm7,%xmm14,%xmm14
- vpxor %xmm1,%xmm4,%xmm4
- vmovdqa 16-128(%rax),%xmm1
-
- vpslld $5,%xmm12,%xmm8
- vpaddd %xmm15,%xmm11,%xmm11
- vpxor %xmm13,%xmm10,%xmm6
- vpaddd %xmm3,%xmm11,%xmm11
- vpxor 112-128(%rax),%xmm4,%xmm4
- vpsrld $27,%xmm12,%xmm9
- vpxor %xmm14,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm4
-
- vpslld $30,%xmm13,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm11,%xmm11
- vpsrld $31,%xmm4,%xmm5
- vpaddd %xmm4,%xmm4,%xmm4
-
- vpsrld $2,%xmm13,%xmm13
- vpaddd %xmm8,%xmm11,%xmm11
- vpor %xmm5,%xmm4,%xmm4
- vpor %xmm7,%xmm13,%xmm13
- vpslld $5,%xmm11,%xmm8
- vpaddd %xmm15,%xmm10,%xmm10
- vpxor %xmm12,%xmm14,%xmm6
-
- vpsrld $27,%xmm11,%xmm9
- vpaddd %xmm4,%xmm10,%xmm10
- vpxor %xmm13,%xmm6,%xmm6
-
- vpslld $30,%xmm12,%xmm7
- vpor %xmm9,%xmm8,%xmm8
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpsrld $2,%xmm12,%xmm12
- vpaddd %xmm8,%xmm10,%xmm10
- vpor %xmm7,%xmm12,%xmm12
- movl $1,%ecx
- cmpl 0(%rbx),%ecx
- cmovgeq %rbp,%r8
- cmpl 4(%rbx),%ecx
- cmovgeq %rbp,%r9
- cmpl 8(%rbx),%ecx
- cmovgeq %rbp,%r10
- cmpl 12(%rbx),%ecx
- cmovgeq %rbp,%r11
- vmovdqu (%rbx),%xmm6
- vpxor %xmm8,%xmm8,%xmm8
- vmovdqa %xmm6,%xmm7
- vpcmpgtd %xmm8,%xmm7,%xmm7
- vpaddd %xmm7,%xmm6,%xmm6
-
- vpand %xmm7,%xmm10,%xmm10
- vpand %xmm7,%xmm11,%xmm11
- vpaddd 0(%rdi),%xmm10,%xmm10
- vpand %xmm7,%xmm12,%xmm12
- vpaddd 32(%rdi),%xmm11,%xmm11
- vpand %xmm7,%xmm13,%xmm13
- vpaddd 64(%rdi),%xmm12,%xmm12
- vpand %xmm7,%xmm14,%xmm14
- vpaddd 96(%rdi),%xmm13,%xmm13
- vpaddd 128(%rdi),%xmm14,%xmm14
- vmovdqu %xmm10,0(%rdi)
- vmovdqu %xmm11,32(%rdi)
- vmovdqu %xmm12,64(%rdi)
- vmovdqu %xmm13,96(%rdi)
- vmovdqu %xmm14,128(%rdi)
-
- vmovdqu %xmm6,(%rbx)
- vmovdqu 96(%rbp),%xmm5
- decl %edx
- jnz .Loop_avx
-
- movl 280(%rsp),%edx
- leaq 16(%rdi),%rdi
- leaq 64(%rsi),%rsi
- decl %edx
- jnz .Loop_grande_avx
-
-.Ldone_avx:
- movq 272(%rsp),%rax
-.cfi_def_cfa %rax,8
- vzeroupper
- movq -16(%rax),%rbp
-.cfi_restore %rbp
- movq -8(%rax),%rbx
-.cfi_restore %rbx
- leaq (%rax),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha1_multi_block_avx,.-sha1_multi_block_avx
-.type sha1_multi_block_avx2,@function
-.align 32
-sha1_multi_block_avx2:
-.cfi_startproc
-_avx2_shortcut:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- subq $576,%rsp
- andq $-256,%rsp
- movq %rax,544(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0xa0,0x04,0x06,0x23,0x08
-.Lbody_avx2:
- leaq K_XX_XX(%rip),%rbp
- shrl $1,%edx
-
- vzeroupper
-.Loop_grande_avx2:
- movl %edx,552(%rsp)
- xorl %edx,%edx
- leaq 512(%rsp),%rbx
- movq 0(%rsi),%r12
- movl 8(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,0(%rbx)
- cmovleq %rbp,%r12
- movq 16(%rsi),%r13
- movl 24(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,4(%rbx)
- cmovleq %rbp,%r13
- movq 32(%rsi),%r14
- movl 40(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,8(%rbx)
- cmovleq %rbp,%r14
- movq 48(%rsi),%r15
- movl 56(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,12(%rbx)
- cmovleq %rbp,%r15
- movq 64(%rsi),%r8
- movl 72(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,16(%rbx)
- cmovleq %rbp,%r8
- movq 80(%rsi),%r9
- movl 88(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,20(%rbx)
- cmovleq %rbp,%r9
- movq 96(%rsi),%r10
- movl 104(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,24(%rbx)
- cmovleq %rbp,%r10
- movq 112(%rsi),%r11
- movl 120(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,28(%rbx)
- cmovleq %rbp,%r11
- vmovdqu 0(%rdi),%ymm0
- leaq 128(%rsp),%rax
- vmovdqu 32(%rdi),%ymm1
- leaq 256+128(%rsp),%rbx
- vmovdqu 64(%rdi),%ymm2
- vmovdqu 96(%rdi),%ymm3
- vmovdqu 128(%rdi),%ymm4
- vmovdqu 96(%rbp),%ymm9
- jmp .Loop_avx2
-
-.align 32
-.Loop_avx2:
- vmovdqa -32(%rbp),%ymm15
- vmovd (%r12),%xmm10
- leaq 64(%r12),%r12
- vmovd (%r8),%xmm12
- leaq 64(%r8),%r8
- vmovd (%r13),%xmm7
- leaq 64(%r13),%r13
- vmovd (%r9),%xmm6
- leaq 64(%r9),%r9
- vpinsrd $1,(%r14),%xmm10,%xmm10
- leaq 64(%r14),%r14
- vpinsrd $1,(%r10),%xmm12,%xmm12
- leaq 64(%r10),%r10
- vpinsrd $1,(%r15),%xmm7,%xmm7
- leaq 64(%r15),%r15
- vpunpckldq %ymm7,%ymm10,%ymm10
- vpinsrd $1,(%r11),%xmm6,%xmm6
- leaq 64(%r11),%r11
- vpunpckldq %ymm6,%ymm12,%ymm12
- vmovd -60(%r12),%xmm11
- vinserti128 $1,%xmm12,%ymm10,%ymm10
- vmovd -60(%r8),%xmm8
- vpshufb %ymm9,%ymm10,%ymm10
- vmovd -60(%r13),%xmm7
- vmovd -60(%r9),%xmm6
- vpinsrd $1,-60(%r14),%xmm11,%xmm11
- vpinsrd $1,-60(%r10),%xmm8,%xmm8
- vpinsrd $1,-60(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm11,%ymm11
- vpinsrd $1,-60(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm4,%ymm4
- vpslld $5,%ymm0,%ymm7
- vpandn %ymm3,%ymm1,%ymm6
- vpand %ymm2,%ymm1,%ymm5
-
- vmovdqa %ymm10,0-128(%rax)
- vpaddd %ymm10,%ymm4,%ymm4
- vinserti128 $1,%xmm8,%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -56(%r12),%xmm12
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -56(%r8),%xmm8
- vpaddd %ymm5,%ymm4,%ymm4
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpshufb %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vmovd -56(%r13),%xmm7
- vmovd -56(%r9),%xmm6
- vpinsrd $1,-56(%r14),%xmm12,%xmm12
- vpinsrd $1,-56(%r10),%xmm8,%xmm8
- vpinsrd $1,-56(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm12,%ymm12
- vpinsrd $1,-56(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm3,%ymm3
- vpslld $5,%ymm4,%ymm7
- vpandn %ymm2,%ymm0,%ymm6
- vpand %ymm1,%ymm0,%ymm5
-
- vmovdqa %ymm11,32-128(%rax)
- vpaddd %ymm11,%ymm3,%ymm3
- vinserti128 $1,%xmm8,%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -52(%r12),%xmm13
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -52(%r8),%xmm8
- vpaddd %ymm5,%ymm3,%ymm3
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpshufb %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vmovd -52(%r13),%xmm7
- vmovd -52(%r9),%xmm6
- vpinsrd $1,-52(%r14),%xmm13,%xmm13
- vpinsrd $1,-52(%r10),%xmm8,%xmm8
- vpinsrd $1,-52(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm13,%ymm13
- vpinsrd $1,-52(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm2,%ymm2
- vpslld $5,%ymm3,%ymm7
- vpandn %ymm1,%ymm4,%ymm6
- vpand %ymm0,%ymm4,%ymm5
-
- vmovdqa %ymm12,64-128(%rax)
- vpaddd %ymm12,%ymm2,%ymm2
- vinserti128 $1,%xmm8,%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -48(%r12),%xmm14
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -48(%r8),%xmm8
- vpaddd %ymm5,%ymm2,%ymm2
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpshufb %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vmovd -48(%r13),%xmm7
- vmovd -48(%r9),%xmm6
- vpinsrd $1,-48(%r14),%xmm14,%xmm14
- vpinsrd $1,-48(%r10),%xmm8,%xmm8
- vpinsrd $1,-48(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm14,%ymm14
- vpinsrd $1,-48(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm1,%ymm1
- vpslld $5,%ymm2,%ymm7
- vpandn %ymm0,%ymm3,%ymm6
- vpand %ymm4,%ymm3,%ymm5
-
- vmovdqa %ymm13,96-128(%rax)
- vpaddd %ymm13,%ymm1,%ymm1
- vinserti128 $1,%xmm8,%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -44(%r12),%xmm10
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -44(%r8),%xmm8
- vpaddd %ymm5,%ymm1,%ymm1
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpshufb %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vmovd -44(%r13),%xmm7
- vmovd -44(%r9),%xmm6
- vpinsrd $1,-44(%r14),%xmm10,%xmm10
- vpinsrd $1,-44(%r10),%xmm8,%xmm8
- vpinsrd $1,-44(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm10,%ymm10
- vpinsrd $1,-44(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm0,%ymm0
- vpslld $5,%ymm1,%ymm7
- vpandn %ymm4,%ymm2,%ymm6
- vpand %ymm3,%ymm2,%ymm5
-
- vmovdqa %ymm14,128-128(%rax)
- vpaddd %ymm14,%ymm0,%ymm0
- vinserti128 $1,%xmm8,%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -40(%r12),%xmm11
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -40(%r8),%xmm8
- vpaddd %ymm5,%ymm0,%ymm0
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpshufb %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vmovd -40(%r13),%xmm7
- vmovd -40(%r9),%xmm6
- vpinsrd $1,-40(%r14),%xmm11,%xmm11
- vpinsrd $1,-40(%r10),%xmm8,%xmm8
- vpinsrd $1,-40(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm11,%ymm11
- vpinsrd $1,-40(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm4,%ymm4
- vpslld $5,%ymm0,%ymm7
- vpandn %ymm3,%ymm1,%ymm6
- vpand %ymm2,%ymm1,%ymm5
-
- vmovdqa %ymm10,160-128(%rax)
- vpaddd %ymm10,%ymm4,%ymm4
- vinserti128 $1,%xmm8,%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -36(%r12),%xmm12
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -36(%r8),%xmm8
- vpaddd %ymm5,%ymm4,%ymm4
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpshufb %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vmovd -36(%r13),%xmm7
- vmovd -36(%r9),%xmm6
- vpinsrd $1,-36(%r14),%xmm12,%xmm12
- vpinsrd $1,-36(%r10),%xmm8,%xmm8
- vpinsrd $1,-36(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm12,%ymm12
- vpinsrd $1,-36(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm3,%ymm3
- vpslld $5,%ymm4,%ymm7
- vpandn %ymm2,%ymm0,%ymm6
- vpand %ymm1,%ymm0,%ymm5
-
- vmovdqa %ymm11,192-128(%rax)
- vpaddd %ymm11,%ymm3,%ymm3
- vinserti128 $1,%xmm8,%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -32(%r12),%xmm13
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -32(%r8),%xmm8
- vpaddd %ymm5,%ymm3,%ymm3
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpshufb %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vmovd -32(%r13),%xmm7
- vmovd -32(%r9),%xmm6
- vpinsrd $1,-32(%r14),%xmm13,%xmm13
- vpinsrd $1,-32(%r10),%xmm8,%xmm8
- vpinsrd $1,-32(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm13,%ymm13
- vpinsrd $1,-32(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm2,%ymm2
- vpslld $5,%ymm3,%ymm7
- vpandn %ymm1,%ymm4,%ymm6
- vpand %ymm0,%ymm4,%ymm5
-
- vmovdqa %ymm12,224-128(%rax)
- vpaddd %ymm12,%ymm2,%ymm2
- vinserti128 $1,%xmm8,%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -28(%r12),%xmm14
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -28(%r8),%xmm8
- vpaddd %ymm5,%ymm2,%ymm2
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpshufb %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vmovd -28(%r13),%xmm7
- vmovd -28(%r9),%xmm6
- vpinsrd $1,-28(%r14),%xmm14,%xmm14
- vpinsrd $1,-28(%r10),%xmm8,%xmm8
- vpinsrd $1,-28(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm14,%ymm14
- vpinsrd $1,-28(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm1,%ymm1
- vpslld $5,%ymm2,%ymm7
- vpandn %ymm0,%ymm3,%ymm6
- vpand %ymm4,%ymm3,%ymm5
-
- vmovdqa %ymm13,256-256-128(%rbx)
- vpaddd %ymm13,%ymm1,%ymm1
- vinserti128 $1,%xmm8,%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -24(%r12),%xmm10
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -24(%r8),%xmm8
- vpaddd %ymm5,%ymm1,%ymm1
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpshufb %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vmovd -24(%r13),%xmm7
- vmovd -24(%r9),%xmm6
- vpinsrd $1,-24(%r14),%xmm10,%xmm10
- vpinsrd $1,-24(%r10),%xmm8,%xmm8
- vpinsrd $1,-24(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm10,%ymm10
- vpinsrd $1,-24(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm0,%ymm0
- vpslld $5,%ymm1,%ymm7
- vpandn %ymm4,%ymm2,%ymm6
- vpand %ymm3,%ymm2,%ymm5
-
- vmovdqa %ymm14,288-256-128(%rbx)
- vpaddd %ymm14,%ymm0,%ymm0
- vinserti128 $1,%xmm8,%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -20(%r12),%xmm11
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -20(%r8),%xmm8
- vpaddd %ymm5,%ymm0,%ymm0
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpshufb %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vmovd -20(%r13),%xmm7
- vmovd -20(%r9),%xmm6
- vpinsrd $1,-20(%r14),%xmm11,%xmm11
- vpinsrd $1,-20(%r10),%xmm8,%xmm8
- vpinsrd $1,-20(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm11,%ymm11
- vpinsrd $1,-20(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm4,%ymm4
- vpslld $5,%ymm0,%ymm7
- vpandn %ymm3,%ymm1,%ymm6
- vpand %ymm2,%ymm1,%ymm5
-
- vmovdqa %ymm10,320-256-128(%rbx)
- vpaddd %ymm10,%ymm4,%ymm4
- vinserti128 $1,%xmm8,%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -16(%r12),%xmm12
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -16(%r8),%xmm8
- vpaddd %ymm5,%ymm4,%ymm4
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpshufb %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vmovd -16(%r13),%xmm7
- vmovd -16(%r9),%xmm6
- vpinsrd $1,-16(%r14),%xmm12,%xmm12
- vpinsrd $1,-16(%r10),%xmm8,%xmm8
- vpinsrd $1,-16(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm12,%ymm12
- vpinsrd $1,-16(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm3,%ymm3
- vpslld $5,%ymm4,%ymm7
- vpandn %ymm2,%ymm0,%ymm6
- vpand %ymm1,%ymm0,%ymm5
-
- vmovdqa %ymm11,352-256-128(%rbx)
- vpaddd %ymm11,%ymm3,%ymm3
- vinserti128 $1,%xmm8,%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -12(%r12),%xmm13
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -12(%r8),%xmm8
- vpaddd %ymm5,%ymm3,%ymm3
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpshufb %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vmovd -12(%r13),%xmm7
- vmovd -12(%r9),%xmm6
- vpinsrd $1,-12(%r14),%xmm13,%xmm13
- vpinsrd $1,-12(%r10),%xmm8,%xmm8
- vpinsrd $1,-12(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm13,%ymm13
- vpinsrd $1,-12(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm2,%ymm2
- vpslld $5,%ymm3,%ymm7
- vpandn %ymm1,%ymm4,%ymm6
- vpand %ymm0,%ymm4,%ymm5
-
- vmovdqa %ymm12,384-256-128(%rbx)
- vpaddd %ymm12,%ymm2,%ymm2
- vinserti128 $1,%xmm8,%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -8(%r12),%xmm14
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -8(%r8),%xmm8
- vpaddd %ymm5,%ymm2,%ymm2
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpshufb %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vmovd -8(%r13),%xmm7
- vmovd -8(%r9),%xmm6
- vpinsrd $1,-8(%r14),%xmm14,%xmm14
- vpinsrd $1,-8(%r10),%xmm8,%xmm8
- vpinsrd $1,-8(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm14,%ymm14
- vpinsrd $1,-8(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm1,%ymm1
- vpslld $5,%ymm2,%ymm7
- vpandn %ymm0,%ymm3,%ymm6
- vpand %ymm4,%ymm3,%ymm5
-
- vmovdqa %ymm13,416-256-128(%rbx)
- vpaddd %ymm13,%ymm1,%ymm1
- vinserti128 $1,%xmm8,%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vmovd -4(%r12),%xmm10
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vmovd -4(%r8),%xmm8
- vpaddd %ymm5,%ymm1,%ymm1
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpshufb %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vmovdqa 0-128(%rax),%ymm11
- vmovd -4(%r13),%xmm7
- vmovd -4(%r9),%xmm6
- vpinsrd $1,-4(%r14),%xmm10,%xmm10
- vpinsrd $1,-4(%r10),%xmm8,%xmm8
- vpinsrd $1,-4(%r15),%xmm7,%xmm7
- vpunpckldq %ymm7,%ymm10,%ymm10
- vpinsrd $1,-4(%r11),%xmm6,%xmm6
- vpunpckldq %ymm6,%ymm8,%ymm8
- vpaddd %ymm15,%ymm0,%ymm0
- prefetcht0 63(%r12)
- vpslld $5,%ymm1,%ymm7
- vpandn %ymm4,%ymm2,%ymm6
- vpand %ymm3,%ymm2,%ymm5
-
- vmovdqa %ymm14,448-256-128(%rbx)
- vpaddd %ymm14,%ymm0,%ymm0
- vinserti128 $1,%xmm8,%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- prefetcht0 63(%r13)
- vpxor %ymm6,%ymm5,%ymm5
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- prefetcht0 63(%r14)
- vpaddd %ymm5,%ymm0,%ymm0
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- prefetcht0 63(%r15)
- vpshufb %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vmovdqa 32-128(%rax),%ymm12
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 64-128(%rax),%ymm13
-
- vpaddd %ymm15,%ymm4,%ymm4
- vpslld $5,%ymm0,%ymm7
- vpandn %ymm3,%ymm1,%ymm6
- prefetcht0 63(%r8)
- vpand %ymm2,%ymm1,%ymm5
-
- vmovdqa %ymm10,480-256-128(%rbx)
- vpaddd %ymm10,%ymm4,%ymm4
- vpxor 256-256-128(%rbx),%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
- prefetcht0 63(%r9)
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm4,%ymm4
- prefetcht0 63(%r10)
- vpsrld $31,%ymm11,%ymm9
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpsrld $2,%ymm1,%ymm1
- prefetcht0 63(%r11)
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 96-128(%rax),%ymm14
-
- vpaddd %ymm15,%ymm3,%ymm3
- vpslld $5,%ymm4,%ymm7
- vpandn %ymm2,%ymm0,%ymm6
-
- vpand %ymm1,%ymm0,%ymm5
-
- vmovdqa %ymm11,0-128(%rax)
- vpaddd %ymm11,%ymm3,%ymm3
- vpxor 288-256-128(%rbx),%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm3,%ymm3
-
- vpsrld $31,%ymm12,%ymm9
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpsrld $2,%ymm0,%ymm0
-
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 128-128(%rax),%ymm10
-
- vpaddd %ymm15,%ymm2,%ymm2
- vpslld $5,%ymm3,%ymm7
- vpandn %ymm1,%ymm4,%ymm6
-
- vpand %ymm0,%ymm4,%ymm5
-
- vmovdqa %ymm12,32-128(%rax)
- vpaddd %ymm12,%ymm2,%ymm2
- vpxor 320-256-128(%rbx),%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm2,%ymm2
-
- vpsrld $31,%ymm13,%ymm9
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpsrld $2,%ymm4,%ymm4
-
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 160-128(%rax),%ymm11
-
- vpaddd %ymm15,%ymm1,%ymm1
- vpslld $5,%ymm2,%ymm7
- vpandn %ymm0,%ymm3,%ymm6
-
- vpand %ymm4,%ymm3,%ymm5
-
- vmovdqa %ymm13,64-128(%rax)
- vpaddd %ymm13,%ymm1,%ymm1
- vpxor 352-256-128(%rbx),%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm1,%ymm1
-
- vpsrld $31,%ymm14,%ymm9
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpsrld $2,%ymm3,%ymm3
-
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 192-128(%rax),%ymm12
-
- vpaddd %ymm15,%ymm0,%ymm0
- vpslld $5,%ymm1,%ymm7
- vpandn %ymm4,%ymm2,%ymm6
-
- vpand %ymm3,%ymm2,%ymm5
-
- vmovdqa %ymm14,96-128(%rax)
- vpaddd %ymm14,%ymm0,%ymm0
- vpxor 384-256-128(%rbx),%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm6,%ymm5,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm0,%ymm0
-
- vpsrld $31,%ymm10,%ymm9
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpsrld $2,%ymm2,%ymm2
-
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vmovdqa 0(%rbp),%ymm15
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 224-128(%rax),%ymm13
-
- vpslld $5,%ymm0,%ymm7
- vpaddd %ymm15,%ymm4,%ymm4
- vpxor %ymm1,%ymm3,%ymm5
- vmovdqa %ymm10,128-128(%rax)
- vpaddd %ymm10,%ymm4,%ymm4
- vpxor 416-256-128(%rbx),%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm5,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm4,%ymm4
- vpsrld $31,%ymm11,%ymm9
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 256-256-128(%rbx),%ymm14
-
- vpslld $5,%ymm4,%ymm7
- vpaddd %ymm15,%ymm3,%ymm3
- vpxor %ymm0,%ymm2,%ymm5
- vmovdqa %ymm11,160-128(%rax)
- vpaddd %ymm11,%ymm3,%ymm3
- vpxor 448-256-128(%rbx),%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm3,%ymm3
- vpsrld $31,%ymm12,%ymm9
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 288-256-128(%rbx),%ymm10
-
- vpslld $5,%ymm3,%ymm7
- vpaddd %ymm15,%ymm2,%ymm2
- vpxor %ymm4,%ymm1,%ymm5
- vmovdqa %ymm12,192-128(%rax)
- vpaddd %ymm12,%ymm2,%ymm2
- vpxor 480-256-128(%rbx),%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm5,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm2,%ymm2
- vpsrld $31,%ymm13,%ymm9
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 320-256-128(%rbx),%ymm11
-
- vpslld $5,%ymm2,%ymm7
- vpaddd %ymm15,%ymm1,%ymm1
- vpxor %ymm3,%ymm0,%ymm5
- vmovdqa %ymm13,224-128(%rax)
- vpaddd %ymm13,%ymm1,%ymm1
- vpxor 0-128(%rax),%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm5,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm1,%ymm1
- vpsrld $31,%ymm14,%ymm9
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 352-256-128(%rbx),%ymm12
-
- vpslld $5,%ymm1,%ymm7
- vpaddd %ymm15,%ymm0,%ymm0
- vpxor %ymm2,%ymm4,%ymm5
- vmovdqa %ymm14,256-256-128(%rbx)
- vpaddd %ymm14,%ymm0,%ymm0
- vpxor 32-128(%rax),%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm5,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm0,%ymm0
- vpsrld $31,%ymm10,%ymm9
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 384-256-128(%rbx),%ymm13
-
- vpslld $5,%ymm0,%ymm7
- vpaddd %ymm15,%ymm4,%ymm4
- vpxor %ymm1,%ymm3,%ymm5
- vmovdqa %ymm10,288-256-128(%rbx)
- vpaddd %ymm10,%ymm4,%ymm4
- vpxor 64-128(%rax),%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm5,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm4,%ymm4
- vpsrld $31,%ymm11,%ymm9
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 416-256-128(%rbx),%ymm14
-
- vpslld $5,%ymm4,%ymm7
- vpaddd %ymm15,%ymm3,%ymm3
- vpxor %ymm0,%ymm2,%ymm5
- vmovdqa %ymm11,320-256-128(%rbx)
- vpaddd %ymm11,%ymm3,%ymm3
- vpxor 96-128(%rax),%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm3,%ymm3
- vpsrld $31,%ymm12,%ymm9
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 448-256-128(%rbx),%ymm10
-
- vpslld $5,%ymm3,%ymm7
- vpaddd %ymm15,%ymm2,%ymm2
- vpxor %ymm4,%ymm1,%ymm5
- vmovdqa %ymm12,352-256-128(%rbx)
- vpaddd %ymm12,%ymm2,%ymm2
- vpxor 128-128(%rax),%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm5,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm2,%ymm2
- vpsrld $31,%ymm13,%ymm9
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 480-256-128(%rbx),%ymm11
-
- vpslld $5,%ymm2,%ymm7
- vpaddd %ymm15,%ymm1,%ymm1
- vpxor %ymm3,%ymm0,%ymm5
- vmovdqa %ymm13,384-256-128(%rbx)
- vpaddd %ymm13,%ymm1,%ymm1
- vpxor 160-128(%rax),%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm5,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm1,%ymm1
- vpsrld $31,%ymm14,%ymm9
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 0-128(%rax),%ymm12
-
- vpslld $5,%ymm1,%ymm7
- vpaddd %ymm15,%ymm0,%ymm0
- vpxor %ymm2,%ymm4,%ymm5
- vmovdqa %ymm14,416-256-128(%rbx)
- vpaddd %ymm14,%ymm0,%ymm0
- vpxor 192-128(%rax),%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm5,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm0,%ymm0
- vpsrld $31,%ymm10,%ymm9
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 32-128(%rax),%ymm13
-
- vpslld $5,%ymm0,%ymm7
- vpaddd %ymm15,%ymm4,%ymm4
- vpxor %ymm1,%ymm3,%ymm5
- vmovdqa %ymm10,448-256-128(%rbx)
- vpaddd %ymm10,%ymm4,%ymm4
- vpxor 224-128(%rax),%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm5,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm4,%ymm4
- vpsrld $31,%ymm11,%ymm9
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 64-128(%rax),%ymm14
-
- vpslld $5,%ymm4,%ymm7
- vpaddd %ymm15,%ymm3,%ymm3
- vpxor %ymm0,%ymm2,%ymm5
- vmovdqa %ymm11,480-256-128(%rbx)
- vpaddd %ymm11,%ymm3,%ymm3
- vpxor 256-256-128(%rbx),%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm3,%ymm3
- vpsrld $31,%ymm12,%ymm9
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 96-128(%rax),%ymm10
-
- vpslld $5,%ymm3,%ymm7
- vpaddd %ymm15,%ymm2,%ymm2
- vpxor %ymm4,%ymm1,%ymm5
- vmovdqa %ymm12,0-128(%rax)
- vpaddd %ymm12,%ymm2,%ymm2
- vpxor 288-256-128(%rbx),%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm5,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm2,%ymm2
- vpsrld $31,%ymm13,%ymm9
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 128-128(%rax),%ymm11
-
- vpslld $5,%ymm2,%ymm7
- vpaddd %ymm15,%ymm1,%ymm1
- vpxor %ymm3,%ymm0,%ymm5
- vmovdqa %ymm13,32-128(%rax)
- vpaddd %ymm13,%ymm1,%ymm1
- vpxor 320-256-128(%rbx),%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm5,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm1,%ymm1
- vpsrld $31,%ymm14,%ymm9
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 160-128(%rax),%ymm12
-
- vpslld $5,%ymm1,%ymm7
- vpaddd %ymm15,%ymm0,%ymm0
- vpxor %ymm2,%ymm4,%ymm5
- vmovdqa %ymm14,64-128(%rax)
- vpaddd %ymm14,%ymm0,%ymm0
- vpxor 352-256-128(%rbx),%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm5,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm0,%ymm0
- vpsrld $31,%ymm10,%ymm9
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 192-128(%rax),%ymm13
-
- vpslld $5,%ymm0,%ymm7
- vpaddd %ymm15,%ymm4,%ymm4
- vpxor %ymm1,%ymm3,%ymm5
- vmovdqa %ymm10,96-128(%rax)
- vpaddd %ymm10,%ymm4,%ymm4
- vpxor 384-256-128(%rbx),%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm5,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm4,%ymm4
- vpsrld $31,%ymm11,%ymm9
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 224-128(%rax),%ymm14
-
- vpslld $5,%ymm4,%ymm7
- vpaddd %ymm15,%ymm3,%ymm3
- vpxor %ymm0,%ymm2,%ymm5
- vmovdqa %ymm11,128-128(%rax)
- vpaddd %ymm11,%ymm3,%ymm3
- vpxor 416-256-128(%rbx),%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm3,%ymm3
- vpsrld $31,%ymm12,%ymm9
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 256-256-128(%rbx),%ymm10
-
- vpslld $5,%ymm3,%ymm7
- vpaddd %ymm15,%ymm2,%ymm2
- vpxor %ymm4,%ymm1,%ymm5
- vmovdqa %ymm12,160-128(%rax)
- vpaddd %ymm12,%ymm2,%ymm2
- vpxor 448-256-128(%rbx),%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm5,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm2,%ymm2
- vpsrld $31,%ymm13,%ymm9
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 288-256-128(%rbx),%ymm11
-
- vpslld $5,%ymm2,%ymm7
- vpaddd %ymm15,%ymm1,%ymm1
- vpxor %ymm3,%ymm0,%ymm5
- vmovdqa %ymm13,192-128(%rax)
- vpaddd %ymm13,%ymm1,%ymm1
- vpxor 480-256-128(%rbx),%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm5,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm1,%ymm1
- vpsrld $31,%ymm14,%ymm9
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 320-256-128(%rbx),%ymm12
-
- vpslld $5,%ymm1,%ymm7
- vpaddd %ymm15,%ymm0,%ymm0
- vpxor %ymm2,%ymm4,%ymm5
- vmovdqa %ymm14,224-128(%rax)
- vpaddd %ymm14,%ymm0,%ymm0
- vpxor 0-128(%rax),%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm5,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm0,%ymm0
- vpsrld $31,%ymm10,%ymm9
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vmovdqa 32(%rbp),%ymm15
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 352-256-128(%rbx),%ymm13
-
- vpaddd %ymm15,%ymm4,%ymm4
- vpslld $5,%ymm0,%ymm7
- vpand %ymm2,%ymm3,%ymm6
- vpxor 32-128(%rax),%ymm11,%ymm11
-
- vpaddd %ymm6,%ymm4,%ymm4
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm3,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vmovdqu %ymm10,256-256-128(%rbx)
- vpaddd %ymm10,%ymm4,%ymm4
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm11,%ymm9
- vpand %ymm1,%ymm5,%ymm5
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpaddd %ymm5,%ymm4,%ymm4
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 384-256-128(%rbx),%ymm14
-
- vpaddd %ymm15,%ymm3,%ymm3
- vpslld $5,%ymm4,%ymm7
- vpand %ymm1,%ymm2,%ymm6
- vpxor 64-128(%rax),%ymm12,%ymm12
-
- vpaddd %ymm6,%ymm3,%ymm3
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm2,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vmovdqu %ymm11,288-256-128(%rbx)
- vpaddd %ymm11,%ymm3,%ymm3
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm12,%ymm9
- vpand %ymm0,%ymm5,%ymm5
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpaddd %ymm5,%ymm3,%ymm3
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 416-256-128(%rbx),%ymm10
-
- vpaddd %ymm15,%ymm2,%ymm2
- vpslld $5,%ymm3,%ymm7
- vpand %ymm0,%ymm1,%ymm6
- vpxor 96-128(%rax),%ymm13,%ymm13
-
- vpaddd %ymm6,%ymm2,%ymm2
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm1,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vmovdqu %ymm12,320-256-128(%rbx)
- vpaddd %ymm12,%ymm2,%ymm2
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm13,%ymm9
- vpand %ymm4,%ymm5,%ymm5
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpaddd %ymm5,%ymm2,%ymm2
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 448-256-128(%rbx),%ymm11
-
- vpaddd %ymm15,%ymm1,%ymm1
- vpslld $5,%ymm2,%ymm7
- vpand %ymm4,%ymm0,%ymm6
- vpxor 128-128(%rax),%ymm14,%ymm14
-
- vpaddd %ymm6,%ymm1,%ymm1
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm0,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vmovdqu %ymm13,352-256-128(%rbx)
- vpaddd %ymm13,%ymm1,%ymm1
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm14,%ymm9
- vpand %ymm3,%ymm5,%ymm5
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpaddd %ymm5,%ymm1,%ymm1
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 480-256-128(%rbx),%ymm12
-
- vpaddd %ymm15,%ymm0,%ymm0
- vpslld $5,%ymm1,%ymm7
- vpand %ymm3,%ymm4,%ymm6
- vpxor 160-128(%rax),%ymm10,%ymm10
-
- vpaddd %ymm6,%ymm0,%ymm0
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm4,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vmovdqu %ymm14,384-256-128(%rbx)
- vpaddd %ymm14,%ymm0,%ymm0
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm10,%ymm9
- vpand %ymm2,%ymm5,%ymm5
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpaddd %ymm5,%ymm0,%ymm0
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 0-128(%rax),%ymm13
-
- vpaddd %ymm15,%ymm4,%ymm4
- vpslld $5,%ymm0,%ymm7
- vpand %ymm2,%ymm3,%ymm6
- vpxor 192-128(%rax),%ymm11,%ymm11
-
- vpaddd %ymm6,%ymm4,%ymm4
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm3,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vmovdqu %ymm10,416-256-128(%rbx)
- vpaddd %ymm10,%ymm4,%ymm4
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm11,%ymm9
- vpand %ymm1,%ymm5,%ymm5
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpaddd %ymm5,%ymm4,%ymm4
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 32-128(%rax),%ymm14
-
- vpaddd %ymm15,%ymm3,%ymm3
- vpslld $5,%ymm4,%ymm7
- vpand %ymm1,%ymm2,%ymm6
- vpxor 224-128(%rax),%ymm12,%ymm12
-
- vpaddd %ymm6,%ymm3,%ymm3
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm2,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vmovdqu %ymm11,448-256-128(%rbx)
- vpaddd %ymm11,%ymm3,%ymm3
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm12,%ymm9
- vpand %ymm0,%ymm5,%ymm5
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpaddd %ymm5,%ymm3,%ymm3
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 64-128(%rax),%ymm10
-
- vpaddd %ymm15,%ymm2,%ymm2
- vpslld $5,%ymm3,%ymm7
- vpand %ymm0,%ymm1,%ymm6
- vpxor 256-256-128(%rbx),%ymm13,%ymm13
-
- vpaddd %ymm6,%ymm2,%ymm2
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm1,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vmovdqu %ymm12,480-256-128(%rbx)
- vpaddd %ymm12,%ymm2,%ymm2
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm13,%ymm9
- vpand %ymm4,%ymm5,%ymm5
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpaddd %ymm5,%ymm2,%ymm2
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 96-128(%rax),%ymm11
-
- vpaddd %ymm15,%ymm1,%ymm1
- vpslld $5,%ymm2,%ymm7
- vpand %ymm4,%ymm0,%ymm6
- vpxor 288-256-128(%rbx),%ymm14,%ymm14
-
- vpaddd %ymm6,%ymm1,%ymm1
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm0,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vmovdqu %ymm13,0-128(%rax)
- vpaddd %ymm13,%ymm1,%ymm1
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm14,%ymm9
- vpand %ymm3,%ymm5,%ymm5
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpaddd %ymm5,%ymm1,%ymm1
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 128-128(%rax),%ymm12
-
- vpaddd %ymm15,%ymm0,%ymm0
- vpslld $5,%ymm1,%ymm7
- vpand %ymm3,%ymm4,%ymm6
- vpxor 320-256-128(%rbx),%ymm10,%ymm10
-
- vpaddd %ymm6,%ymm0,%ymm0
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm4,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vmovdqu %ymm14,32-128(%rax)
- vpaddd %ymm14,%ymm0,%ymm0
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm10,%ymm9
- vpand %ymm2,%ymm5,%ymm5
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpaddd %ymm5,%ymm0,%ymm0
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 160-128(%rax),%ymm13
-
- vpaddd %ymm15,%ymm4,%ymm4
- vpslld $5,%ymm0,%ymm7
- vpand %ymm2,%ymm3,%ymm6
- vpxor 352-256-128(%rbx),%ymm11,%ymm11
-
- vpaddd %ymm6,%ymm4,%ymm4
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm3,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vmovdqu %ymm10,64-128(%rax)
- vpaddd %ymm10,%ymm4,%ymm4
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm11,%ymm9
- vpand %ymm1,%ymm5,%ymm5
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpaddd %ymm5,%ymm4,%ymm4
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 192-128(%rax),%ymm14
-
- vpaddd %ymm15,%ymm3,%ymm3
- vpslld $5,%ymm4,%ymm7
- vpand %ymm1,%ymm2,%ymm6
- vpxor 384-256-128(%rbx),%ymm12,%ymm12
-
- vpaddd %ymm6,%ymm3,%ymm3
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm2,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vmovdqu %ymm11,96-128(%rax)
- vpaddd %ymm11,%ymm3,%ymm3
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm12,%ymm9
- vpand %ymm0,%ymm5,%ymm5
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpaddd %ymm5,%ymm3,%ymm3
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 224-128(%rax),%ymm10
-
- vpaddd %ymm15,%ymm2,%ymm2
- vpslld $5,%ymm3,%ymm7
- vpand %ymm0,%ymm1,%ymm6
- vpxor 416-256-128(%rbx),%ymm13,%ymm13
-
- vpaddd %ymm6,%ymm2,%ymm2
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm1,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vmovdqu %ymm12,128-128(%rax)
- vpaddd %ymm12,%ymm2,%ymm2
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm13,%ymm9
- vpand %ymm4,%ymm5,%ymm5
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpaddd %ymm5,%ymm2,%ymm2
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 256-256-128(%rbx),%ymm11
-
- vpaddd %ymm15,%ymm1,%ymm1
- vpslld $5,%ymm2,%ymm7
- vpand %ymm4,%ymm0,%ymm6
- vpxor 448-256-128(%rbx),%ymm14,%ymm14
-
- vpaddd %ymm6,%ymm1,%ymm1
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm0,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vmovdqu %ymm13,160-128(%rax)
- vpaddd %ymm13,%ymm1,%ymm1
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm14,%ymm9
- vpand %ymm3,%ymm5,%ymm5
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpaddd %ymm5,%ymm1,%ymm1
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 288-256-128(%rbx),%ymm12
-
- vpaddd %ymm15,%ymm0,%ymm0
- vpslld $5,%ymm1,%ymm7
- vpand %ymm3,%ymm4,%ymm6
- vpxor 480-256-128(%rbx),%ymm10,%ymm10
-
- vpaddd %ymm6,%ymm0,%ymm0
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm4,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vmovdqu %ymm14,192-128(%rax)
- vpaddd %ymm14,%ymm0,%ymm0
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm10,%ymm9
- vpand %ymm2,%ymm5,%ymm5
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpaddd %ymm5,%ymm0,%ymm0
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 320-256-128(%rbx),%ymm13
-
- vpaddd %ymm15,%ymm4,%ymm4
- vpslld $5,%ymm0,%ymm7
- vpand %ymm2,%ymm3,%ymm6
- vpxor 0-128(%rax),%ymm11,%ymm11
-
- vpaddd %ymm6,%ymm4,%ymm4
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm3,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vmovdqu %ymm10,224-128(%rax)
- vpaddd %ymm10,%ymm4,%ymm4
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm11,%ymm9
- vpand %ymm1,%ymm5,%ymm5
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpaddd %ymm5,%ymm4,%ymm4
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 352-256-128(%rbx),%ymm14
-
- vpaddd %ymm15,%ymm3,%ymm3
- vpslld $5,%ymm4,%ymm7
- vpand %ymm1,%ymm2,%ymm6
- vpxor 32-128(%rax),%ymm12,%ymm12
-
- vpaddd %ymm6,%ymm3,%ymm3
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm2,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vmovdqu %ymm11,256-256-128(%rbx)
- vpaddd %ymm11,%ymm3,%ymm3
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm12,%ymm9
- vpand %ymm0,%ymm5,%ymm5
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpaddd %ymm5,%ymm3,%ymm3
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 384-256-128(%rbx),%ymm10
-
- vpaddd %ymm15,%ymm2,%ymm2
- vpslld $5,%ymm3,%ymm7
- vpand %ymm0,%ymm1,%ymm6
- vpxor 64-128(%rax),%ymm13,%ymm13
-
- vpaddd %ymm6,%ymm2,%ymm2
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm1,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vmovdqu %ymm12,288-256-128(%rbx)
- vpaddd %ymm12,%ymm2,%ymm2
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm13,%ymm9
- vpand %ymm4,%ymm5,%ymm5
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpaddd %ymm5,%ymm2,%ymm2
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 416-256-128(%rbx),%ymm11
-
- vpaddd %ymm15,%ymm1,%ymm1
- vpslld $5,%ymm2,%ymm7
- vpand %ymm4,%ymm0,%ymm6
- vpxor 96-128(%rax),%ymm14,%ymm14
-
- vpaddd %ymm6,%ymm1,%ymm1
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm0,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vmovdqu %ymm13,320-256-128(%rbx)
- vpaddd %ymm13,%ymm1,%ymm1
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm14,%ymm9
- vpand %ymm3,%ymm5,%ymm5
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpaddd %ymm5,%ymm1,%ymm1
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 448-256-128(%rbx),%ymm12
-
- vpaddd %ymm15,%ymm0,%ymm0
- vpslld $5,%ymm1,%ymm7
- vpand %ymm3,%ymm4,%ymm6
- vpxor 128-128(%rax),%ymm10,%ymm10
-
- vpaddd %ymm6,%ymm0,%ymm0
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm4,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vmovdqu %ymm14,352-256-128(%rbx)
- vpaddd %ymm14,%ymm0,%ymm0
- vpor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm10,%ymm9
- vpand %ymm2,%ymm5,%ymm5
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpaddd %ymm5,%ymm0,%ymm0
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vmovdqa 64(%rbp),%ymm15
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 480-256-128(%rbx),%ymm13
-
- vpslld $5,%ymm0,%ymm7
- vpaddd %ymm15,%ymm4,%ymm4
- vpxor %ymm1,%ymm3,%ymm5
- vmovdqa %ymm10,384-256-128(%rbx)
- vpaddd %ymm10,%ymm4,%ymm4
- vpxor 160-128(%rax),%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm5,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm4,%ymm4
- vpsrld $31,%ymm11,%ymm9
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 0-128(%rax),%ymm14
-
- vpslld $5,%ymm4,%ymm7
- vpaddd %ymm15,%ymm3,%ymm3
- vpxor %ymm0,%ymm2,%ymm5
- vmovdqa %ymm11,416-256-128(%rbx)
- vpaddd %ymm11,%ymm3,%ymm3
- vpxor 192-128(%rax),%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm3,%ymm3
- vpsrld $31,%ymm12,%ymm9
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 32-128(%rax),%ymm10
-
- vpslld $5,%ymm3,%ymm7
- vpaddd %ymm15,%ymm2,%ymm2
- vpxor %ymm4,%ymm1,%ymm5
- vmovdqa %ymm12,448-256-128(%rbx)
- vpaddd %ymm12,%ymm2,%ymm2
- vpxor 224-128(%rax),%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm5,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm2,%ymm2
- vpsrld $31,%ymm13,%ymm9
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 64-128(%rax),%ymm11
-
- vpslld $5,%ymm2,%ymm7
- vpaddd %ymm15,%ymm1,%ymm1
- vpxor %ymm3,%ymm0,%ymm5
- vmovdqa %ymm13,480-256-128(%rbx)
- vpaddd %ymm13,%ymm1,%ymm1
- vpxor 256-256-128(%rbx),%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm5,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm1,%ymm1
- vpsrld $31,%ymm14,%ymm9
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 96-128(%rax),%ymm12
-
- vpslld $5,%ymm1,%ymm7
- vpaddd %ymm15,%ymm0,%ymm0
- vpxor %ymm2,%ymm4,%ymm5
- vmovdqa %ymm14,0-128(%rax)
- vpaddd %ymm14,%ymm0,%ymm0
- vpxor 288-256-128(%rbx),%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm5,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm0,%ymm0
- vpsrld $31,%ymm10,%ymm9
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 128-128(%rax),%ymm13
-
- vpslld $5,%ymm0,%ymm7
- vpaddd %ymm15,%ymm4,%ymm4
- vpxor %ymm1,%ymm3,%ymm5
- vmovdqa %ymm10,32-128(%rax)
- vpaddd %ymm10,%ymm4,%ymm4
- vpxor 320-256-128(%rbx),%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm5,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm4,%ymm4
- vpsrld $31,%ymm11,%ymm9
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 160-128(%rax),%ymm14
-
- vpslld $5,%ymm4,%ymm7
- vpaddd %ymm15,%ymm3,%ymm3
- vpxor %ymm0,%ymm2,%ymm5
- vmovdqa %ymm11,64-128(%rax)
- vpaddd %ymm11,%ymm3,%ymm3
- vpxor 352-256-128(%rbx),%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm3,%ymm3
- vpsrld $31,%ymm12,%ymm9
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 192-128(%rax),%ymm10
-
- vpslld $5,%ymm3,%ymm7
- vpaddd %ymm15,%ymm2,%ymm2
- vpxor %ymm4,%ymm1,%ymm5
- vmovdqa %ymm12,96-128(%rax)
- vpaddd %ymm12,%ymm2,%ymm2
- vpxor 384-256-128(%rbx),%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm5,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm2,%ymm2
- vpsrld $31,%ymm13,%ymm9
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 224-128(%rax),%ymm11
-
- vpslld $5,%ymm2,%ymm7
- vpaddd %ymm15,%ymm1,%ymm1
- vpxor %ymm3,%ymm0,%ymm5
- vmovdqa %ymm13,128-128(%rax)
- vpaddd %ymm13,%ymm1,%ymm1
- vpxor 416-256-128(%rbx),%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm5,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm1,%ymm1
- vpsrld $31,%ymm14,%ymm9
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 256-256-128(%rbx),%ymm12
-
- vpslld $5,%ymm1,%ymm7
- vpaddd %ymm15,%ymm0,%ymm0
- vpxor %ymm2,%ymm4,%ymm5
- vmovdqa %ymm14,160-128(%rax)
- vpaddd %ymm14,%ymm0,%ymm0
- vpxor 448-256-128(%rbx),%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm5,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm0,%ymm0
- vpsrld $31,%ymm10,%ymm9
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 288-256-128(%rbx),%ymm13
-
- vpslld $5,%ymm0,%ymm7
- vpaddd %ymm15,%ymm4,%ymm4
- vpxor %ymm1,%ymm3,%ymm5
- vmovdqa %ymm10,192-128(%rax)
- vpaddd %ymm10,%ymm4,%ymm4
- vpxor 480-256-128(%rbx),%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm5,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm4,%ymm4
- vpsrld $31,%ymm11,%ymm9
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 320-256-128(%rbx),%ymm14
-
- vpslld $5,%ymm4,%ymm7
- vpaddd %ymm15,%ymm3,%ymm3
- vpxor %ymm0,%ymm2,%ymm5
- vmovdqa %ymm11,224-128(%rax)
- vpaddd %ymm11,%ymm3,%ymm3
- vpxor 0-128(%rax),%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm3,%ymm3
- vpsrld $31,%ymm12,%ymm9
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 352-256-128(%rbx),%ymm10
-
- vpslld $5,%ymm3,%ymm7
- vpaddd %ymm15,%ymm2,%ymm2
- vpxor %ymm4,%ymm1,%ymm5
- vpaddd %ymm12,%ymm2,%ymm2
- vpxor 32-128(%rax),%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm5,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm2,%ymm2
- vpsrld $31,%ymm13,%ymm9
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 384-256-128(%rbx),%ymm11
-
- vpslld $5,%ymm2,%ymm7
- vpaddd %ymm15,%ymm1,%ymm1
- vpxor %ymm3,%ymm0,%ymm5
- vpaddd %ymm13,%ymm1,%ymm1
- vpxor 64-128(%rax),%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm5,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm1,%ymm1
- vpsrld $31,%ymm14,%ymm9
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpxor %ymm12,%ymm10,%ymm10
- vmovdqa 416-256-128(%rbx),%ymm12
-
- vpslld $5,%ymm1,%ymm7
- vpaddd %ymm15,%ymm0,%ymm0
- vpxor %ymm2,%ymm4,%ymm5
- vpaddd %ymm14,%ymm0,%ymm0
- vpxor 96-128(%rax),%ymm10,%ymm10
- vpsrld $27,%ymm1,%ymm8
- vpxor %ymm3,%ymm5,%ymm5
- vpxor %ymm12,%ymm10,%ymm10
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm0,%ymm0
- vpsrld $31,%ymm10,%ymm9
- vpaddd %ymm10,%ymm10,%ymm10
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm9,%ymm10,%ymm10
- vpor %ymm6,%ymm2,%ymm2
- vpxor %ymm13,%ymm11,%ymm11
- vmovdqa 448-256-128(%rbx),%ymm13
-
- vpslld $5,%ymm0,%ymm7
- vpaddd %ymm15,%ymm4,%ymm4
- vpxor %ymm1,%ymm3,%ymm5
- vpaddd %ymm10,%ymm4,%ymm4
- vpxor 128-128(%rax),%ymm11,%ymm11
- vpsrld $27,%ymm0,%ymm8
- vpxor %ymm2,%ymm5,%ymm5
- vpxor %ymm13,%ymm11,%ymm11
-
- vpslld $30,%ymm1,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm4,%ymm4
- vpsrld $31,%ymm11,%ymm9
- vpaddd %ymm11,%ymm11,%ymm11
-
- vpsrld $2,%ymm1,%ymm1
- vpaddd %ymm7,%ymm4,%ymm4
- vpor %ymm9,%ymm11,%ymm11
- vpor %ymm6,%ymm1,%ymm1
- vpxor %ymm14,%ymm12,%ymm12
- vmovdqa 480-256-128(%rbx),%ymm14
-
- vpslld $5,%ymm4,%ymm7
- vpaddd %ymm15,%ymm3,%ymm3
- vpxor %ymm0,%ymm2,%ymm5
- vpaddd %ymm11,%ymm3,%ymm3
- vpxor 160-128(%rax),%ymm12,%ymm12
- vpsrld $27,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm14,%ymm12,%ymm12
-
- vpslld $30,%ymm0,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm3,%ymm3
- vpsrld $31,%ymm12,%ymm9
- vpaddd %ymm12,%ymm12,%ymm12
-
- vpsrld $2,%ymm0,%ymm0
- vpaddd %ymm7,%ymm3,%ymm3
- vpor %ymm9,%ymm12,%ymm12
- vpor %ymm6,%ymm0,%ymm0
- vpxor %ymm10,%ymm13,%ymm13
- vmovdqa 0-128(%rax),%ymm10
-
- vpslld $5,%ymm3,%ymm7
- vpaddd %ymm15,%ymm2,%ymm2
- vpxor %ymm4,%ymm1,%ymm5
- vpaddd %ymm12,%ymm2,%ymm2
- vpxor 192-128(%rax),%ymm13,%ymm13
- vpsrld $27,%ymm3,%ymm8
- vpxor %ymm0,%ymm5,%ymm5
- vpxor %ymm10,%ymm13,%ymm13
-
- vpslld $30,%ymm4,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm2,%ymm2
- vpsrld $31,%ymm13,%ymm9
- vpaddd %ymm13,%ymm13,%ymm13
-
- vpsrld $2,%ymm4,%ymm4
- vpaddd %ymm7,%ymm2,%ymm2
- vpor %ymm9,%ymm13,%ymm13
- vpor %ymm6,%ymm4,%ymm4
- vpxor %ymm11,%ymm14,%ymm14
- vmovdqa 32-128(%rax),%ymm11
-
- vpslld $5,%ymm2,%ymm7
- vpaddd %ymm15,%ymm1,%ymm1
- vpxor %ymm3,%ymm0,%ymm5
- vpaddd %ymm13,%ymm1,%ymm1
- vpxor 224-128(%rax),%ymm14,%ymm14
- vpsrld $27,%ymm2,%ymm8
- vpxor %ymm4,%ymm5,%ymm5
- vpxor %ymm11,%ymm14,%ymm14
-
- vpslld $30,%ymm3,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm1,%ymm1
- vpsrld $31,%ymm14,%ymm9
- vpaddd %ymm14,%ymm14,%ymm14
-
- vpsrld $2,%ymm3,%ymm3
- vpaddd %ymm7,%ymm1,%ymm1
- vpor %ymm9,%ymm14,%ymm14
- vpor %ymm6,%ymm3,%ymm3
- vpslld $5,%ymm1,%ymm7
- vpaddd %ymm15,%ymm0,%ymm0
- vpxor %ymm2,%ymm4,%ymm5
-
- vpsrld $27,%ymm1,%ymm8
- vpaddd %ymm14,%ymm0,%ymm0
- vpxor %ymm3,%ymm5,%ymm5
-
- vpslld $30,%ymm2,%ymm6
- vpor %ymm8,%ymm7,%ymm7
- vpaddd %ymm5,%ymm0,%ymm0
-
- vpsrld $2,%ymm2,%ymm2
- vpaddd %ymm7,%ymm0,%ymm0
- vpor %ymm6,%ymm2,%ymm2
- movl $1,%ecx
- leaq 512(%rsp),%rbx
- cmpl 0(%rbx),%ecx
- cmovgeq %rbp,%r12
- cmpl 4(%rbx),%ecx
- cmovgeq %rbp,%r13
- cmpl 8(%rbx),%ecx
- cmovgeq %rbp,%r14
- cmpl 12(%rbx),%ecx
- cmovgeq %rbp,%r15
- cmpl 16(%rbx),%ecx
- cmovgeq %rbp,%r8
- cmpl 20(%rbx),%ecx
- cmovgeq %rbp,%r9
- cmpl 24(%rbx),%ecx
- cmovgeq %rbp,%r10
- cmpl 28(%rbx),%ecx
- cmovgeq %rbp,%r11
- vmovdqu (%rbx),%ymm5
- vpxor %ymm7,%ymm7,%ymm7
- vmovdqa %ymm5,%ymm6
- vpcmpgtd %ymm7,%ymm6,%ymm6
- vpaddd %ymm6,%ymm5,%ymm5
-
- vpand %ymm6,%ymm0,%ymm0
- vpand %ymm6,%ymm1,%ymm1
- vpaddd 0(%rdi),%ymm0,%ymm0
- vpand %ymm6,%ymm2,%ymm2
- vpaddd 32(%rdi),%ymm1,%ymm1
- vpand %ymm6,%ymm3,%ymm3
- vpaddd 64(%rdi),%ymm2,%ymm2
- vpand %ymm6,%ymm4,%ymm4
- vpaddd 96(%rdi),%ymm3,%ymm3
- vpaddd 128(%rdi),%ymm4,%ymm4
- vmovdqu %ymm0,0(%rdi)
- vmovdqu %ymm1,32(%rdi)
- vmovdqu %ymm2,64(%rdi)
- vmovdqu %ymm3,96(%rdi)
- vmovdqu %ymm4,128(%rdi)
-
- vmovdqu %ymm5,(%rbx)
- leaq 256+128(%rsp),%rbx
- vmovdqu 96(%rbp),%ymm9
- decl %edx
- jnz .Loop_avx2
-
-
-
-
-
-
-
-.Ldone_avx2:
- movq 544(%rsp),%rax
-.cfi_def_cfa %rax,8
- vzeroupper
- movq -48(%rax),%r15
-.cfi_restore %r15
- movq -40(%rax),%r14
-.cfi_restore %r14
- movq -32(%rax),%r13
-.cfi_restore %r13
- movq -24(%rax),%r12
-.cfi_restore %r12
- movq -16(%rax),%rbp
-.cfi_restore %rbp
- movq -8(%rax),%rbx
-.cfi_restore %rbx
- leaq (%rax),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx2:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha1_multi_block_avx2,.-sha1_multi_block_avx2
.align 256
.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
diff --git a/secure/lib/libcrypto/amd64/sha1-x86_64.S b/secure/lib/libcrypto/amd64/sha1-x86_64.S
index 6848be8786212..cf36e17d31211 100644
--- a/secure/lib/libcrypto/amd64/sha1-x86_64.S
+++ b/secure/lib/libcrypto/amd64/sha1-x86_64.S
@@ -15,14 +15,6 @@ sha1_block_data_order:
jz .Lialu
testl $536870912,%r10d
jnz _shaext_shortcut
- andl $296,%r10d
- cmpl $296,%r10d
- je _avx2_shortcut
- andl $268435456,%r8d
- andl $1073741824,%r9d
- orl %r9d,%r8d
- cmpl $1342177280,%r8d
- je _avx_shortcut
jmp _ssse3_shortcut
.align 16
@@ -1423,8 +1415,8 @@ _shaext_shortcut:
pshufd $27,%xmm1,%xmm1
movdqu %xmm0,(%rdi)
movd %xmm1,16(%rdi)
-.cfi_endproc
.byte 0xf3,0xc3
+.cfi_endproc
.size sha1_block_data_order_shaext,.-sha1_block_data_order_shaext
.type sha1_block_data_order_ssse3,@function
.align 16
@@ -2614,2827 +2606,6 @@ _ssse3_shortcut:
.byte 0xf3,0xc3
.cfi_endproc
.size sha1_block_data_order_ssse3,.-sha1_block_data_order_ssse3
-.type sha1_block_data_order_avx,@function
-.align 16
-sha1_block_data_order_avx:
-_avx_shortcut:
-.cfi_startproc
- movq %rsp,%r11
-.cfi_def_cfa_register %r11
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- leaq -64(%rsp),%rsp
- vzeroupper
- andq $-64,%rsp
- movq %rdi,%r8
- movq %rsi,%r9
- movq %rdx,%r10
-
- shlq $6,%r10
- addq %r9,%r10
- leaq K_XX_XX+64(%rip),%r14
-
- movl 0(%r8),%eax
- movl 4(%r8),%ebx
- movl 8(%r8),%ecx
- movl 12(%r8),%edx
- movl %ebx,%esi
- movl 16(%r8),%ebp
- movl %ecx,%edi
- xorl %edx,%edi
- andl %edi,%esi
-
- vmovdqa 64(%r14),%xmm6
- vmovdqa -64(%r14),%xmm11
- vmovdqu 0(%r9),%xmm0
- vmovdqu 16(%r9),%xmm1
- vmovdqu 32(%r9),%xmm2
- vmovdqu 48(%r9),%xmm3
- vpshufb %xmm6,%xmm0,%xmm0
- addq $64,%r9
- vpshufb %xmm6,%xmm1,%xmm1
- vpshufb %xmm6,%xmm2,%xmm2
- vpshufb %xmm6,%xmm3,%xmm3
- vpaddd %xmm11,%xmm0,%xmm4
- vpaddd %xmm11,%xmm1,%xmm5
- vpaddd %xmm11,%xmm2,%xmm6
- vmovdqa %xmm4,0(%rsp)
- vmovdqa %xmm5,16(%rsp)
- vmovdqa %xmm6,32(%rsp)
- jmp .Loop_avx
-.align 16
-.Loop_avx:
- shrdl $2,%ebx,%ebx
- xorl %edx,%esi
- vpalignr $8,%xmm0,%xmm1,%xmm4
- movl %eax,%edi
- addl 0(%rsp),%ebp
- vpaddd %xmm3,%xmm11,%xmm9
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpsrldq $4,%xmm3,%xmm8
- addl %esi,%ebp
- andl %ebx,%edi
- vpxor %xmm0,%xmm4,%xmm4
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpxor %xmm2,%xmm8,%xmm8
- shrdl $7,%eax,%eax
- xorl %ecx,%edi
- movl %ebp,%esi
- addl 4(%rsp),%edx
- vpxor %xmm8,%xmm4,%xmm4
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- vmovdqa %xmm9,48(%rsp)
- addl %edi,%edx
- andl %eax,%esi
- vpsrld $31,%xmm4,%xmm8
- xorl %ebx,%eax
- addl %ebp,%edx
- shrdl $7,%ebp,%ebp
- xorl %ebx,%esi
- vpslldq $12,%xmm4,%xmm10
- vpaddd %xmm4,%xmm4,%xmm4
- movl %edx,%edi
- addl 8(%rsp),%ecx
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vpsrld $30,%xmm10,%xmm9
- vpor %xmm8,%xmm4,%xmm4
- addl %esi,%ecx
- andl %ebp,%edi
- xorl %eax,%ebp
- addl %edx,%ecx
- vpslld $2,%xmm10,%xmm10
- vpxor %xmm9,%xmm4,%xmm4
- shrdl $7,%edx,%edx
- xorl %eax,%edi
- movl %ecx,%esi
- addl 12(%rsp),%ebx
- vpxor %xmm10,%xmm4,%xmm4
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- andl %edx,%esi
- xorl %ebp,%edx
- addl %ecx,%ebx
- shrdl $7,%ecx,%ecx
- xorl %ebp,%esi
- vpalignr $8,%xmm1,%xmm2,%xmm5
- movl %ebx,%edi
- addl 16(%rsp),%eax
- vpaddd %xmm4,%xmm11,%xmm9
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vpsrldq $4,%xmm4,%xmm8
- addl %esi,%eax
- andl %ecx,%edi
- vpxor %xmm1,%xmm5,%xmm5
- xorl %edx,%ecx
- addl %ebx,%eax
- vpxor %xmm3,%xmm8,%xmm8
- shrdl $7,%ebx,%ebx
- xorl %edx,%edi
- movl %eax,%esi
- addl 20(%rsp),%ebp
- vpxor %xmm8,%xmm5,%xmm5
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vmovdqa %xmm9,0(%rsp)
- addl %edi,%ebp
- andl %ebx,%esi
- vpsrld $31,%xmm5,%xmm8
- xorl %ecx,%ebx
- addl %eax,%ebp
- shrdl $7,%eax,%eax
- xorl %ecx,%esi
- vpslldq $12,%xmm5,%xmm10
- vpaddd %xmm5,%xmm5,%xmm5
- movl %ebp,%edi
- addl 24(%rsp),%edx
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- vpsrld $30,%xmm10,%xmm9
- vpor %xmm8,%xmm5,%xmm5
- addl %esi,%edx
- andl %eax,%edi
- xorl %ebx,%eax
- addl %ebp,%edx
- vpslld $2,%xmm10,%xmm10
- vpxor %xmm9,%xmm5,%xmm5
- shrdl $7,%ebp,%ebp
- xorl %ebx,%edi
- movl %edx,%esi
- addl 28(%rsp),%ecx
- vpxor %xmm10,%xmm5,%xmm5
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vmovdqa -32(%r14),%xmm11
- addl %edi,%ecx
- andl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- shrdl $7,%edx,%edx
- xorl %eax,%esi
- vpalignr $8,%xmm2,%xmm3,%xmm6
- movl %ecx,%edi
- addl 32(%rsp),%ebx
- vpaddd %xmm5,%xmm11,%xmm9
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- vpsrldq $4,%xmm5,%xmm8
- addl %esi,%ebx
- andl %edx,%edi
- vpxor %xmm2,%xmm6,%xmm6
- xorl %ebp,%edx
- addl %ecx,%ebx
- vpxor %xmm4,%xmm8,%xmm8
- shrdl $7,%ecx,%ecx
- xorl %ebp,%edi
- movl %ebx,%esi
- addl 36(%rsp),%eax
- vpxor %xmm8,%xmm6,%xmm6
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vmovdqa %xmm9,16(%rsp)
- addl %edi,%eax
- andl %ecx,%esi
- vpsrld $31,%xmm6,%xmm8
- xorl %edx,%ecx
- addl %ebx,%eax
- shrdl $7,%ebx,%ebx
- xorl %edx,%esi
- vpslldq $12,%xmm6,%xmm10
- vpaddd %xmm6,%xmm6,%xmm6
- movl %eax,%edi
- addl 40(%rsp),%ebp
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpsrld $30,%xmm10,%xmm9
- vpor %xmm8,%xmm6,%xmm6
- addl %esi,%ebp
- andl %ebx,%edi
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpslld $2,%xmm10,%xmm10
- vpxor %xmm9,%xmm6,%xmm6
- shrdl $7,%eax,%eax
- xorl %ecx,%edi
- movl %ebp,%esi
- addl 44(%rsp),%edx
- vpxor %xmm10,%xmm6,%xmm6
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- andl %eax,%esi
- xorl %ebx,%eax
- addl %ebp,%edx
- shrdl $7,%ebp,%ebp
- xorl %ebx,%esi
- vpalignr $8,%xmm3,%xmm4,%xmm7
- movl %edx,%edi
- addl 48(%rsp),%ecx
- vpaddd %xmm6,%xmm11,%xmm9
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vpsrldq $4,%xmm6,%xmm8
- addl %esi,%ecx
- andl %ebp,%edi
- vpxor %xmm3,%xmm7,%xmm7
- xorl %eax,%ebp
- addl %edx,%ecx
- vpxor %xmm5,%xmm8,%xmm8
- shrdl $7,%edx,%edx
- xorl %eax,%edi
- movl %ecx,%esi
- addl 52(%rsp),%ebx
- vpxor %xmm8,%xmm7,%xmm7
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- vmovdqa %xmm9,32(%rsp)
- addl %edi,%ebx
- andl %edx,%esi
- vpsrld $31,%xmm7,%xmm8
- xorl %ebp,%edx
- addl %ecx,%ebx
- shrdl $7,%ecx,%ecx
- xorl %ebp,%esi
- vpslldq $12,%xmm7,%xmm10
- vpaddd %xmm7,%xmm7,%xmm7
- movl %ebx,%edi
- addl 56(%rsp),%eax
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vpsrld $30,%xmm10,%xmm9
- vpor %xmm8,%xmm7,%xmm7
- addl %esi,%eax
- andl %ecx,%edi
- xorl %edx,%ecx
- addl %ebx,%eax
- vpslld $2,%xmm10,%xmm10
- vpxor %xmm9,%xmm7,%xmm7
- shrdl $7,%ebx,%ebx
- xorl %edx,%edi
- movl %eax,%esi
- addl 60(%rsp),%ebp
- vpxor %xmm10,%xmm7,%xmm7
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- addl %edi,%ebp
- andl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpalignr $8,%xmm6,%xmm7,%xmm8
- vpxor %xmm4,%xmm0,%xmm0
- shrdl $7,%eax,%eax
- xorl %ecx,%esi
- movl %ebp,%edi
- addl 0(%rsp),%edx
- vpxor %xmm1,%xmm0,%xmm0
- xorl %ebx,%eax
- shldl $5,%ebp,%ebp
- vpaddd %xmm7,%xmm11,%xmm9
- addl %esi,%edx
- andl %eax,%edi
- vpxor %xmm8,%xmm0,%xmm0
- xorl %ebx,%eax
- addl %ebp,%edx
- shrdl $7,%ebp,%ebp
- xorl %ebx,%edi
- vpsrld $30,%xmm0,%xmm8
- vmovdqa %xmm9,48(%rsp)
- movl %edx,%esi
- addl 4(%rsp),%ecx
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- vpslld $2,%xmm0,%xmm0
- addl %edi,%ecx
- andl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- shrdl $7,%edx,%edx
- xorl %eax,%esi
- movl %ecx,%edi
- addl 8(%rsp),%ebx
- vpor %xmm8,%xmm0,%xmm0
- xorl %ebp,%edx
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- andl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 12(%rsp),%eax
- xorl %ebp,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpalignr $8,%xmm7,%xmm0,%xmm8
- vpxor %xmm5,%xmm1,%xmm1
- addl 16(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- vpxor %xmm2,%xmm1,%xmm1
- addl %esi,%ebp
- xorl %ecx,%edi
- vpaddd %xmm0,%xmm11,%xmm9
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpxor %xmm8,%xmm1,%xmm1
- addl 20(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- vpsrld $30,%xmm1,%xmm8
- vmovdqa %xmm9,0(%rsp)
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpslld $2,%xmm1,%xmm1
- addl 24(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpor %xmm8,%xmm1,%xmm1
- addl 28(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpalignr $8,%xmm0,%xmm1,%xmm8
- vpxor %xmm6,%xmm2,%xmm2
- addl 32(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- vpxor %xmm3,%xmm2,%xmm2
- addl %esi,%eax
- xorl %edx,%edi
- vpaddd %xmm1,%xmm11,%xmm9
- vmovdqa 0(%r14),%xmm11
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpxor %xmm8,%xmm2,%xmm2
- addl 36(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- vpsrld $30,%xmm2,%xmm8
- vmovdqa %xmm9,16(%rsp)
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpslld $2,%xmm2,%xmm2
- addl 40(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpor %xmm8,%xmm2,%xmm2
- addl 44(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpalignr $8,%xmm1,%xmm2,%xmm8
- vpxor %xmm7,%xmm3,%xmm3
- addl 48(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- vpxor %xmm4,%xmm3,%xmm3
- addl %esi,%ebx
- xorl %ebp,%edi
- vpaddd %xmm2,%xmm11,%xmm9
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpxor %xmm8,%xmm3,%xmm3
- addl 52(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- vpsrld $30,%xmm3,%xmm8
- vmovdqa %xmm9,32(%rsp)
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpslld $2,%xmm3,%xmm3
- addl 56(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpor %xmm8,%xmm3,%xmm3
- addl 60(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpalignr $8,%xmm2,%xmm3,%xmm8
- vpxor %xmm0,%xmm4,%xmm4
- addl 0(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- vpxor %xmm5,%xmm4,%xmm4
- addl %esi,%ecx
- xorl %eax,%edi
- vpaddd %xmm3,%xmm11,%xmm9
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpxor %xmm8,%xmm4,%xmm4
- addl 4(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- vpsrld $30,%xmm4,%xmm8
- vmovdqa %xmm9,48(%rsp)
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpslld $2,%xmm4,%xmm4
- addl 8(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpor %xmm8,%xmm4,%xmm4
- addl 12(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpalignr $8,%xmm3,%xmm4,%xmm8
- vpxor %xmm1,%xmm5,%xmm5
- addl 16(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- vpxor %xmm6,%xmm5,%xmm5
- addl %esi,%edx
- xorl %ebx,%edi
- vpaddd %xmm4,%xmm11,%xmm9
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpxor %xmm8,%xmm5,%xmm5
- addl 20(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- vpsrld $30,%xmm5,%xmm8
- vmovdqa %xmm9,0(%rsp)
- addl %edi,%ecx
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpslld $2,%xmm5,%xmm5
- addl 24(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpor %xmm8,%xmm5,%xmm5
- addl 28(%rsp),%eax
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- xorl %edx,%edi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- vpalignr $8,%xmm4,%xmm5,%xmm8
- vpxor %xmm2,%xmm6,%xmm6
- addl 32(%rsp),%ebp
- andl %ecx,%esi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- vpxor %xmm7,%xmm6,%xmm6
- movl %eax,%edi
- xorl %ecx,%esi
- vpaddd %xmm5,%xmm11,%xmm9
- shldl $5,%eax,%eax
- addl %esi,%ebp
- vpxor %xmm8,%xmm6,%xmm6
- xorl %ebx,%edi
- xorl %ecx,%ebx
- addl %eax,%ebp
- addl 36(%rsp),%edx
- vpsrld $30,%xmm6,%xmm8
- vmovdqa %xmm9,16(%rsp)
- andl %ebx,%edi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %ebp,%esi
- vpslld $2,%xmm6,%xmm6
- xorl %ebx,%edi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %ebp,%edx
- addl 40(%rsp),%ecx
- andl %eax,%esi
- vpor %xmm8,%xmm6,%xmm6
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- movl %edx,%edi
- xorl %eax,%esi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %ebp,%edi
- xorl %eax,%ebp
- addl %edx,%ecx
- addl 44(%rsp),%ebx
- andl %ebp,%edi
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- movl %ecx,%esi
- xorl %ebp,%edi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %edx,%esi
- xorl %ebp,%edx
- addl %ecx,%ebx
- vpalignr $8,%xmm5,%xmm6,%xmm8
- vpxor %xmm3,%xmm7,%xmm7
- addl 48(%rsp),%eax
- andl %edx,%esi
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- vpxor %xmm0,%xmm7,%xmm7
- movl %ebx,%edi
- xorl %edx,%esi
- vpaddd %xmm6,%xmm11,%xmm9
- vmovdqa 32(%r14),%xmm11
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- vpxor %xmm8,%xmm7,%xmm7
- xorl %ecx,%edi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 52(%rsp),%ebp
- vpsrld $30,%xmm7,%xmm8
- vmovdqa %xmm9,32(%rsp)
- andl %ecx,%edi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%esi
- vpslld $2,%xmm7,%xmm7
- xorl %ecx,%edi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%ebp
- addl 56(%rsp),%edx
- andl %ebx,%esi
- vpor %xmm8,%xmm7,%xmm7
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %ebp,%edi
- xorl %ebx,%esi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %eax,%edi
- xorl %ebx,%eax
- addl %ebp,%edx
- addl 60(%rsp),%ecx
- andl %eax,%edi
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- movl %edx,%esi
- xorl %eax,%edi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- vpalignr $8,%xmm6,%xmm7,%xmm8
- vpxor %xmm4,%xmm0,%xmm0
- addl 0(%rsp),%ebx
- andl %ebp,%esi
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- vpxor %xmm1,%xmm0,%xmm0
- movl %ecx,%edi
- xorl %ebp,%esi
- vpaddd %xmm7,%xmm11,%xmm9
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- vpxor %xmm8,%xmm0,%xmm0
- xorl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 4(%rsp),%eax
- vpsrld $30,%xmm0,%xmm8
- vmovdqa %xmm9,48(%rsp)
- andl %edx,%edi
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- vpslld $2,%xmm0,%xmm0
- xorl %edx,%edi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 8(%rsp),%ebp
- andl %ecx,%esi
- vpor %xmm8,%xmm0,%xmm0
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%edi
- xorl %ecx,%esi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ebx,%edi
- xorl %ecx,%ebx
- addl %eax,%ebp
- addl 12(%rsp),%edx
- andl %ebx,%edi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %ebp,%esi
- xorl %ebx,%edi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %ebp,%edx
- vpalignr $8,%xmm7,%xmm0,%xmm8
- vpxor %xmm5,%xmm1,%xmm1
- addl 16(%rsp),%ecx
- andl %eax,%esi
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- vpxor %xmm2,%xmm1,%xmm1
- movl %edx,%edi
- xorl %eax,%esi
- vpaddd %xmm0,%xmm11,%xmm9
- shldl $5,%edx,%edx
- addl %esi,%ecx
- vpxor %xmm8,%xmm1,%xmm1
- xorl %ebp,%edi
- xorl %eax,%ebp
- addl %edx,%ecx
- addl 20(%rsp),%ebx
- vpsrld $30,%xmm1,%xmm8
- vmovdqa %xmm9,0(%rsp)
- andl %ebp,%edi
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- movl %ecx,%esi
- vpslld $2,%xmm1,%xmm1
- xorl %ebp,%edi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %edx,%esi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 24(%rsp),%eax
- andl %edx,%esi
- vpor %xmm8,%xmm1,%xmm1
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%edi
- xorl %edx,%esi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %ecx,%edi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 28(%rsp),%ebp
- andl %ecx,%edi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%esi
- xorl %ecx,%edi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%ebp
- vpalignr $8,%xmm0,%xmm1,%xmm8
- vpxor %xmm6,%xmm2,%xmm2
- addl 32(%rsp),%edx
- andl %ebx,%esi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- vpxor %xmm3,%xmm2,%xmm2
- movl %ebp,%edi
- xorl %ebx,%esi
- vpaddd %xmm1,%xmm11,%xmm9
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- vpxor %xmm8,%xmm2,%xmm2
- xorl %eax,%edi
- xorl %ebx,%eax
- addl %ebp,%edx
- addl 36(%rsp),%ecx
- vpsrld $30,%xmm2,%xmm8
- vmovdqa %xmm9,16(%rsp)
- andl %eax,%edi
- xorl %ebx,%eax
- shrdl $7,%ebp,%ebp
- movl %edx,%esi
- vpslld $2,%xmm2,%xmm2
- xorl %eax,%edi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %ebp,%esi
- xorl %eax,%ebp
- addl %edx,%ecx
- addl 40(%rsp),%ebx
- andl %ebp,%esi
- vpor %xmm8,%xmm2,%xmm2
- xorl %eax,%ebp
- shrdl $7,%edx,%edx
- movl %ecx,%edi
- xorl %ebp,%esi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edx,%edi
- xorl %ebp,%edx
- addl %ecx,%ebx
- addl 44(%rsp),%eax
- andl %edx,%edi
- xorl %ebp,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- xorl %edx,%edi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- addl %ebx,%eax
- vpalignr $8,%xmm1,%xmm2,%xmm8
- vpxor %xmm7,%xmm3,%xmm3
- addl 48(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- vpxor %xmm4,%xmm3,%xmm3
- addl %esi,%ebp
- xorl %ecx,%edi
- vpaddd %xmm2,%xmm11,%xmm9
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- vpxor %xmm8,%xmm3,%xmm3
- addl 52(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- vpsrld $30,%xmm3,%xmm8
- vmovdqa %xmm9,32(%rsp)
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vpslld $2,%xmm3,%xmm3
- addl 56(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vpor %xmm8,%xmm3,%xmm3
- addl 60(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 0(%rsp),%eax
- vpaddd %xmm3,%xmm11,%xmm9
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- vmovdqa %xmm9,48(%rsp)
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 4(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 8(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 12(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- cmpq %r10,%r9
- je .Ldone_avx
- vmovdqa 64(%r14),%xmm6
- vmovdqa -64(%r14),%xmm11
- vmovdqu 0(%r9),%xmm0
- vmovdqu 16(%r9),%xmm1
- vmovdqu 32(%r9),%xmm2
- vmovdqu 48(%r9),%xmm3
- vpshufb %xmm6,%xmm0,%xmm0
- addq $64,%r9
- addl 16(%rsp),%ebx
- xorl %ebp,%esi
- vpshufb %xmm6,%xmm1,%xmm1
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- vpaddd %xmm11,%xmm0,%xmm4
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vmovdqa %xmm4,0(%rsp)
- addl 20(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 24(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 28(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 32(%rsp),%ecx
- xorl %eax,%esi
- vpshufb %xmm6,%xmm2,%xmm2
- movl %edx,%edi
- shldl $5,%edx,%edx
- vpaddd %xmm11,%xmm1,%xmm5
- addl %esi,%ecx
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- vmovdqa %xmm5,16(%rsp)
- addl 36(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 40(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 44(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 48(%rsp),%edx
- xorl %ebx,%esi
- vpshufb %xmm6,%xmm3,%xmm3
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- vpaddd %xmm11,%xmm2,%xmm6
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- vmovdqa %xmm6,32(%rsp)
- addl 52(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- addl 56(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 60(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 0(%r8),%eax
- addl 4(%r8),%esi
- addl 8(%r8),%ecx
- addl 12(%r8),%edx
- movl %eax,0(%r8)
- addl 16(%r8),%ebp
- movl %esi,4(%r8)
- movl %esi,%ebx
- movl %ecx,8(%r8)
- movl %ecx,%edi
- movl %edx,12(%r8)
- xorl %edx,%edi
- movl %ebp,16(%r8)
- andl %edi,%esi
- jmp .Loop_avx
-
-.align 16
-.Ldone_avx:
- addl 16(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 20(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 24(%rsp),%ebp
- xorl %ecx,%esi
- movl %eax,%edi
- shldl $5,%eax,%eax
- addl %esi,%ebp
- xorl %ecx,%edi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 28(%rsp),%edx
- xorl %ebx,%edi
- movl %ebp,%esi
- shldl $5,%ebp,%ebp
- addl %edi,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 32(%rsp),%ecx
- xorl %eax,%esi
- movl %edx,%edi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%edi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- addl 36(%rsp),%ebx
- xorl %ebp,%edi
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %edi,%ebx
- xorl %ebp,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 40(%rsp),%eax
- xorl %edx,%esi
- movl %ebx,%edi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%edi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 44(%rsp),%ebp
- xorl %ecx,%edi
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %edi,%ebp
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%ebp
- addl 48(%rsp),%edx
- xorl %ebx,%esi
- movl %ebp,%edi
- shldl $5,%ebp,%ebp
- addl %esi,%edx
- xorl %ebx,%edi
- shrdl $7,%eax,%eax
- addl %ebp,%edx
- addl 52(%rsp),%ecx
- xorl %eax,%edi
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %edi,%ecx
- xorl %eax,%esi
- shrdl $7,%ebp,%ebp
- addl %edx,%ecx
- addl 56(%rsp),%ebx
- xorl %ebp,%esi
- movl %ecx,%edi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %ebp,%edi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 60(%rsp),%eax
- xorl %edx,%edi
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %edi,%eax
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vzeroupper
-
- addl 0(%r8),%eax
- addl 4(%r8),%esi
- addl 8(%r8),%ecx
- movl %eax,0(%r8)
- addl 12(%r8),%edx
- movl %esi,4(%r8)
- addl 16(%r8),%ebp
- movl %ecx,8(%r8)
- movl %edx,12(%r8)
- movl %ebp,16(%r8)
- movq -40(%r11),%r14
-.cfi_restore %r14
- movq -32(%r11),%r13
-.cfi_restore %r13
- movq -24(%r11),%r12
-.cfi_restore %r12
- movq -16(%r11),%rbp
-.cfi_restore %rbp
- movq -8(%r11),%rbx
-.cfi_restore %rbx
- leaq (%r11),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha1_block_data_order_avx,.-sha1_block_data_order_avx
-.type sha1_block_data_order_avx2,@function
-.align 16
-sha1_block_data_order_avx2:
-_avx2_shortcut:
-.cfi_startproc
- movq %rsp,%r11
-.cfi_def_cfa_register %r11
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- vzeroupper
- movq %rdi,%r8
- movq %rsi,%r9
- movq %rdx,%r10
-
- leaq -640(%rsp),%rsp
- shlq $6,%r10
- leaq 64(%r9),%r13
- andq $-128,%rsp
- addq %r9,%r10
- leaq K_XX_XX+64(%rip),%r14
-
- movl 0(%r8),%eax
- cmpq %r10,%r13
- cmovaeq %r9,%r13
- movl 4(%r8),%ebp
- movl 8(%r8),%ecx
- movl 12(%r8),%edx
- movl 16(%r8),%esi
- vmovdqu 64(%r14),%ymm6
-
- vmovdqu (%r9),%xmm0
- vmovdqu 16(%r9),%xmm1
- vmovdqu 32(%r9),%xmm2
- vmovdqu 48(%r9),%xmm3
- leaq 64(%r9),%r9
- vinserti128 $1,(%r13),%ymm0,%ymm0
- vinserti128 $1,16(%r13),%ymm1,%ymm1
- vpshufb %ymm6,%ymm0,%ymm0
- vinserti128 $1,32(%r13),%ymm2,%ymm2
- vpshufb %ymm6,%ymm1,%ymm1
- vinserti128 $1,48(%r13),%ymm3,%ymm3
- vpshufb %ymm6,%ymm2,%ymm2
- vmovdqu -64(%r14),%ymm11
- vpshufb %ymm6,%ymm3,%ymm3
-
- vpaddd %ymm11,%ymm0,%ymm4
- vpaddd %ymm11,%ymm1,%ymm5
- vmovdqu %ymm4,0(%rsp)
- vpaddd %ymm11,%ymm2,%ymm6
- vmovdqu %ymm5,32(%rsp)
- vpaddd %ymm11,%ymm3,%ymm7
- vmovdqu %ymm6,64(%rsp)
- vmovdqu %ymm7,96(%rsp)
- vpalignr $8,%ymm0,%ymm1,%ymm4
- vpsrldq $4,%ymm3,%ymm8
- vpxor %ymm0,%ymm4,%ymm4
- vpxor %ymm2,%ymm8,%ymm8
- vpxor %ymm8,%ymm4,%ymm4
- vpsrld $31,%ymm4,%ymm8
- vpslldq $12,%ymm4,%ymm10
- vpaddd %ymm4,%ymm4,%ymm4
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm4,%ymm4
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm4,%ymm4
- vpxor %ymm10,%ymm4,%ymm4
- vpaddd %ymm11,%ymm4,%ymm9
- vmovdqu %ymm9,128(%rsp)
- vpalignr $8,%ymm1,%ymm2,%ymm5
- vpsrldq $4,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm3,%ymm8,%ymm8
- vpxor %ymm8,%ymm5,%ymm5
- vpsrld $31,%ymm5,%ymm8
- vmovdqu -32(%r14),%ymm11
- vpslldq $12,%ymm5,%ymm10
- vpaddd %ymm5,%ymm5,%ymm5
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm5,%ymm5
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm5,%ymm5
- vpxor %ymm10,%ymm5,%ymm5
- vpaddd %ymm11,%ymm5,%ymm9
- vmovdqu %ymm9,160(%rsp)
- vpalignr $8,%ymm2,%ymm3,%ymm6
- vpsrldq $4,%ymm5,%ymm8
- vpxor %ymm2,%ymm6,%ymm6
- vpxor %ymm4,%ymm8,%ymm8
- vpxor %ymm8,%ymm6,%ymm6
- vpsrld $31,%ymm6,%ymm8
- vpslldq $12,%ymm6,%ymm10
- vpaddd %ymm6,%ymm6,%ymm6
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm6,%ymm6
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm6,%ymm6
- vpxor %ymm10,%ymm6,%ymm6
- vpaddd %ymm11,%ymm6,%ymm9
- vmovdqu %ymm9,192(%rsp)
- vpalignr $8,%ymm3,%ymm4,%ymm7
- vpsrldq $4,%ymm6,%ymm8
- vpxor %ymm3,%ymm7,%ymm7
- vpxor %ymm5,%ymm8,%ymm8
- vpxor %ymm8,%ymm7,%ymm7
- vpsrld $31,%ymm7,%ymm8
- vpslldq $12,%ymm7,%ymm10
- vpaddd %ymm7,%ymm7,%ymm7
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm7,%ymm7
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm7,%ymm7
- vpxor %ymm10,%ymm7,%ymm7
- vpaddd %ymm11,%ymm7,%ymm9
- vmovdqu %ymm9,224(%rsp)
- leaq 128(%rsp),%r13
- jmp .Loop_avx2
-.align 32
-.Loop_avx2:
- rorxl $2,%ebp,%ebx
- andnl %edx,%ebp,%edi
- andl %ecx,%ebp
- xorl %edi,%ebp
- jmp .Lalign32_1
-.align 32
-.Lalign32_1:
- vpalignr $8,%ymm6,%ymm7,%ymm8
- vpxor %ymm4,%ymm0,%ymm0
- addl -128(%r13),%esi
- andnl %ecx,%eax,%edi
- vpxor %ymm1,%ymm0,%ymm0
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- vpxor %ymm8,%ymm0,%ymm0
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- vpsrld $30,%ymm0,%ymm8
- vpslld $2,%ymm0,%ymm0
- addl -124(%r13),%edx
- andnl %ebx,%esi,%edi
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- vpor %ymm8,%ymm0,%ymm0
- addl %r12d,%edx
- xorl %edi,%esi
- addl -120(%r13),%ecx
- andnl %ebp,%edx,%edi
- vpaddd %ymm11,%ymm0,%ymm9
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- vmovdqu %ymm9,256(%rsp)
- addl %r12d,%ecx
- xorl %edi,%edx
- addl -116(%r13),%ebx
- andnl %eax,%ecx,%edi
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- addl -96(%r13),%ebp
- andnl %esi,%ebx,%edi
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- andl %edx,%ebx
- addl %r12d,%ebp
- xorl %edi,%ebx
- vpalignr $8,%ymm7,%ymm0,%ymm8
- vpxor %ymm5,%ymm1,%ymm1
- addl -92(%r13),%eax
- andnl %edx,%ebp,%edi
- vpxor %ymm2,%ymm1,%ymm1
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- vpxor %ymm8,%ymm1,%ymm1
- andl %ecx,%ebp
- addl %r12d,%eax
- xorl %edi,%ebp
- vpsrld $30,%ymm1,%ymm8
- vpslld $2,%ymm1,%ymm1
- addl -88(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- vpor %ymm8,%ymm1,%ymm1
- addl %r12d,%esi
- xorl %edi,%eax
- addl -84(%r13),%edx
- andnl %ebx,%esi,%edi
- vpaddd %ymm11,%ymm1,%ymm9
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- vmovdqu %ymm9,288(%rsp)
- addl %r12d,%edx
- xorl %edi,%esi
- addl -64(%r13),%ecx
- andnl %ebp,%edx,%edi
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- addl %r12d,%ecx
- xorl %edi,%edx
- addl -60(%r13),%ebx
- andnl %eax,%ecx,%edi
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- vpalignr $8,%ymm0,%ymm1,%ymm8
- vpxor %ymm6,%ymm2,%ymm2
- addl -56(%r13),%ebp
- andnl %esi,%ebx,%edi
- vpxor %ymm3,%ymm2,%ymm2
- vmovdqu 0(%r14),%ymm11
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- vpxor %ymm8,%ymm2,%ymm2
- andl %edx,%ebx
- addl %r12d,%ebp
- xorl %edi,%ebx
- vpsrld $30,%ymm2,%ymm8
- vpslld $2,%ymm2,%ymm2
- addl -52(%r13),%eax
- andnl %edx,%ebp,%edi
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- andl %ecx,%ebp
- vpor %ymm8,%ymm2,%ymm2
- addl %r12d,%eax
- xorl %edi,%ebp
- addl -32(%r13),%esi
- andnl %ecx,%eax,%edi
- vpaddd %ymm11,%ymm2,%ymm9
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- vmovdqu %ymm9,320(%rsp)
- addl %r12d,%esi
- xorl %edi,%eax
- addl -28(%r13),%edx
- andnl %ebx,%esi,%edi
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- addl %r12d,%edx
- xorl %edi,%esi
- addl -24(%r13),%ecx
- andnl %ebp,%edx,%edi
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- addl %r12d,%ecx
- xorl %edi,%edx
- vpalignr $8,%ymm1,%ymm2,%ymm8
- vpxor %ymm7,%ymm3,%ymm3
- addl -20(%r13),%ebx
- andnl %eax,%ecx,%edi
- vpxor %ymm4,%ymm3,%ymm3
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- vpxor %ymm8,%ymm3,%ymm3
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- vpsrld $30,%ymm3,%ymm8
- vpslld $2,%ymm3,%ymm3
- addl 0(%r13),%ebp
- andnl %esi,%ebx,%edi
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- andl %edx,%ebx
- vpor %ymm8,%ymm3,%ymm3
- addl %r12d,%ebp
- xorl %edi,%ebx
- addl 4(%r13),%eax
- andnl %edx,%ebp,%edi
- vpaddd %ymm11,%ymm3,%ymm9
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- andl %ecx,%ebp
- vmovdqu %ymm9,352(%rsp)
- addl %r12d,%eax
- xorl %edi,%ebp
- addl 8(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- addl 12(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- vpalignr $8,%ymm2,%ymm3,%ymm8
- vpxor %ymm0,%ymm4,%ymm4
- addl 32(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- vpxor %ymm8,%ymm4,%ymm4
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl 36(%r13),%ebx
- vpsrld $30,%ymm4,%ymm8
- vpslld $2,%ymm4,%ymm4
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- vpor %ymm8,%ymm4,%ymm4
- addl 40(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- vpaddd %ymm11,%ymm4,%ymm9
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl 44(%r13),%eax
- vmovdqu %ymm9,384(%rsp)
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl 64(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- vpalignr $8,%ymm3,%ymm4,%ymm8
- vpxor %ymm1,%ymm5,%ymm5
- addl 68(%r13),%edx
- leal (%rdx,%rax,1),%edx
- vpxor %ymm6,%ymm5,%ymm5
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- vpxor %ymm8,%ymm5,%ymm5
- addl %r12d,%edx
- xorl %ebx,%esi
- addl 72(%r13),%ecx
- vpsrld $30,%ymm5,%ymm8
- vpslld $2,%ymm5,%ymm5
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- vpor %ymm8,%ymm5,%ymm5
- addl 76(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- vpaddd %ymm11,%ymm5,%ymm9
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl 96(%r13),%ebp
- vmovdqu %ymm9,416(%rsp)
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl 100(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- vpalignr $8,%ymm4,%ymm5,%ymm8
- vpxor %ymm2,%ymm6,%ymm6
- addl 104(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- vpxor %ymm7,%ymm6,%ymm6
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- vpxor %ymm8,%ymm6,%ymm6
- addl %r12d,%esi
- xorl %ecx,%eax
- addl 108(%r13),%edx
- leaq 256(%r13),%r13
- vpsrld $30,%ymm6,%ymm8
- vpslld $2,%ymm6,%ymm6
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- vpor %ymm8,%ymm6,%ymm6
- addl -128(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- vpaddd %ymm11,%ymm6,%ymm9
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -124(%r13),%ebx
- vmovdqu %ymm9,448(%rsp)
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -120(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- vpalignr $8,%ymm5,%ymm6,%ymm8
- vpxor %ymm3,%ymm7,%ymm7
- addl -116(%r13),%eax
- leal (%rax,%rbx,1),%eax
- vpxor %ymm0,%ymm7,%ymm7
- vmovdqu 32(%r14),%ymm11
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- vpxor %ymm8,%ymm7,%ymm7
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -96(%r13),%esi
- vpsrld $30,%ymm7,%ymm8
- vpslld $2,%ymm7,%ymm7
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- vpor %ymm8,%ymm7,%ymm7
- addl -92(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- vpaddd %ymm11,%ymm7,%ymm9
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl -88(%r13),%ecx
- vmovdqu %ymm9,480(%rsp)
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -84(%r13),%ebx
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- andl %edi,%ecx
- jmp .Lalign32_2
-.align 32
-.Lalign32_2:
- vpalignr $8,%ymm6,%ymm7,%ymm8
- vpxor %ymm4,%ymm0,%ymm0
- addl -64(%r13),%ebp
- xorl %esi,%ecx
- vpxor %ymm1,%ymm0,%ymm0
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- vpxor %ymm8,%ymm0,%ymm0
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- vpsrld $30,%ymm0,%ymm8
- vpslld $2,%ymm0,%ymm0
- addl %r12d,%ebp
- andl %edi,%ebx
- addl -60(%r13),%eax
- xorl %edx,%ebx
- movl %ecx,%edi
- xorl %edx,%edi
- vpor %ymm8,%ymm0,%ymm0
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- vpaddd %ymm11,%ymm0,%ymm9
- addl %r12d,%eax
- andl %edi,%ebp
- addl -56(%r13),%esi
- xorl %ecx,%ebp
- vmovdqu %ymm9,512(%rsp)
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- addl -52(%r13),%edx
- xorl %ebx,%eax
- movl %ebp,%edi
- xorl %ebx,%edi
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- andl %edi,%esi
- addl -32(%r13),%ecx
- xorl %ebp,%esi
- movl %eax,%edi
- xorl %ebp,%edi
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- andl %edi,%edx
- vpalignr $8,%ymm7,%ymm0,%ymm8
- vpxor %ymm5,%ymm1,%ymm1
- addl -28(%r13),%ebx
- xorl %eax,%edx
- vpxor %ymm2,%ymm1,%ymm1
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- vpxor %ymm8,%ymm1,%ymm1
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- vpsrld $30,%ymm1,%ymm8
- vpslld $2,%ymm1,%ymm1
- addl %r12d,%ebx
- andl %edi,%ecx
- addl -24(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- vpor %ymm8,%ymm1,%ymm1
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- vpaddd %ymm11,%ymm1,%ymm9
- addl %r12d,%ebp
- andl %edi,%ebx
- addl -20(%r13),%eax
- xorl %edx,%ebx
- vmovdqu %ymm9,544(%rsp)
- movl %ecx,%edi
- xorl %edx,%edi
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- andl %edi,%ebp
- addl 0(%r13),%esi
- xorl %ecx,%ebp
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- addl 4(%r13),%edx
- xorl %ebx,%eax
- movl %ebp,%edi
- xorl %ebx,%edi
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- andl %edi,%esi
- vpalignr $8,%ymm0,%ymm1,%ymm8
- vpxor %ymm6,%ymm2,%ymm2
- addl 8(%r13),%ecx
- xorl %ebp,%esi
- vpxor %ymm3,%ymm2,%ymm2
- movl %eax,%edi
- xorl %ebp,%edi
- leal (%rcx,%rsi,1),%ecx
- vpxor %ymm8,%ymm2,%ymm2
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- vpsrld $30,%ymm2,%ymm8
- vpslld $2,%ymm2,%ymm2
- addl %r12d,%ecx
- andl %edi,%edx
- addl 12(%r13),%ebx
- xorl %eax,%edx
- movl %esi,%edi
- xorl %eax,%edi
- vpor %ymm8,%ymm2,%ymm2
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- vpaddd %ymm11,%ymm2,%ymm9
- addl %r12d,%ebx
- andl %edi,%ecx
- addl 32(%r13),%ebp
- xorl %esi,%ecx
- vmovdqu %ymm9,576(%rsp)
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl 36(%r13),%eax
- xorl %edx,%ebx
- movl %ecx,%edi
- xorl %edx,%edi
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- andl %edi,%ebp
- addl 40(%r13),%esi
- xorl %ecx,%ebp
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- vpalignr $8,%ymm1,%ymm2,%ymm8
- vpxor %ymm7,%ymm3,%ymm3
- addl 44(%r13),%edx
- xorl %ebx,%eax
- vpxor %ymm4,%ymm3,%ymm3
- movl %ebp,%edi
- xorl %ebx,%edi
- leal (%rdx,%rax,1),%edx
- vpxor %ymm8,%ymm3,%ymm3
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- vpsrld $30,%ymm3,%ymm8
- vpslld $2,%ymm3,%ymm3
- addl %r12d,%edx
- andl %edi,%esi
- addl 64(%r13),%ecx
- xorl %ebp,%esi
- movl %eax,%edi
- xorl %ebp,%edi
- vpor %ymm8,%ymm3,%ymm3
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- vpaddd %ymm11,%ymm3,%ymm9
- addl %r12d,%ecx
- andl %edi,%edx
- addl 68(%r13),%ebx
- xorl %eax,%edx
- vmovdqu %ymm9,608(%rsp)
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- andl %edi,%ecx
- addl 72(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl 76(%r13),%eax
- xorl %edx,%ebx
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl 96(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl 100(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl 104(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl 108(%r13),%ebx
- leaq 256(%r13),%r13
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -128(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl -124(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -120(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl -116(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl -96(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -92(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -88(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl -84(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -64(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl -60(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl -56(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -52(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -32(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl -28(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -24(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl -20(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- addl %r12d,%edx
- leaq 128(%r9),%r13
- leaq 128(%r9),%rdi
- cmpq %r10,%r13
- cmovaeq %r9,%r13
-
-
- addl 0(%r8),%edx
- addl 4(%r8),%esi
- addl 8(%r8),%ebp
- movl %edx,0(%r8)
- addl 12(%r8),%ebx
- movl %esi,4(%r8)
- movl %edx,%eax
- addl 16(%r8),%ecx
- movl %ebp,%r12d
- movl %ebp,8(%r8)
- movl %ebx,%edx
-
- movl %ebx,12(%r8)
- movl %esi,%ebp
- movl %ecx,16(%r8)
-
- movl %ecx,%esi
- movl %r12d,%ecx
-
-
- cmpq %r10,%r9
- je .Ldone_avx2
- vmovdqu 64(%r14),%ymm6
- cmpq %r10,%rdi
- ja .Last_avx2
-
- vmovdqu -64(%rdi),%xmm0
- vmovdqu -48(%rdi),%xmm1
- vmovdqu -32(%rdi),%xmm2
- vmovdqu -16(%rdi),%xmm3
- vinserti128 $1,0(%r13),%ymm0,%ymm0
- vinserti128 $1,16(%r13),%ymm1,%ymm1
- vinserti128 $1,32(%r13),%ymm2,%ymm2
- vinserti128 $1,48(%r13),%ymm3,%ymm3
- jmp .Last_avx2
-
-.align 32
-.Last_avx2:
- leaq 128+16(%rsp),%r13
- rorxl $2,%ebp,%ebx
- andnl %edx,%ebp,%edi
- andl %ecx,%ebp
- xorl %edi,%ebp
- subq $-128,%r9
- addl -128(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- addl -124(%r13),%edx
- andnl %ebx,%esi,%edi
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- addl %r12d,%edx
- xorl %edi,%esi
- addl -120(%r13),%ecx
- andnl %ebp,%edx,%edi
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- addl %r12d,%ecx
- xorl %edi,%edx
- addl -116(%r13),%ebx
- andnl %eax,%ecx,%edi
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- addl -96(%r13),%ebp
- andnl %esi,%ebx,%edi
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- andl %edx,%ebx
- addl %r12d,%ebp
- xorl %edi,%ebx
- addl -92(%r13),%eax
- andnl %edx,%ebp,%edi
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- andl %ecx,%ebp
- addl %r12d,%eax
- xorl %edi,%ebp
- addl -88(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- addl -84(%r13),%edx
- andnl %ebx,%esi,%edi
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- addl %r12d,%edx
- xorl %edi,%esi
- addl -64(%r13),%ecx
- andnl %ebp,%edx,%edi
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- addl %r12d,%ecx
- xorl %edi,%edx
- addl -60(%r13),%ebx
- andnl %eax,%ecx,%edi
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- addl -56(%r13),%ebp
- andnl %esi,%ebx,%edi
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- andl %edx,%ebx
- addl %r12d,%ebp
- xorl %edi,%ebx
- addl -52(%r13),%eax
- andnl %edx,%ebp,%edi
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- andl %ecx,%ebp
- addl %r12d,%eax
- xorl %edi,%ebp
- addl -32(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- addl -28(%r13),%edx
- andnl %ebx,%esi,%edi
- addl %eax,%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- andl %ebp,%esi
- addl %r12d,%edx
- xorl %edi,%esi
- addl -24(%r13),%ecx
- andnl %ebp,%edx,%edi
- addl %esi,%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- andl %eax,%edx
- addl %r12d,%ecx
- xorl %edi,%edx
- addl -20(%r13),%ebx
- andnl %eax,%ecx,%edi
- addl %edx,%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- andl %esi,%ecx
- addl %r12d,%ebx
- xorl %edi,%ecx
- addl 0(%r13),%ebp
- andnl %esi,%ebx,%edi
- addl %ecx,%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- andl %edx,%ebx
- addl %r12d,%ebp
- xorl %edi,%ebx
- addl 4(%r13),%eax
- andnl %edx,%ebp,%edi
- addl %ebx,%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- andl %ecx,%ebp
- addl %r12d,%eax
- xorl %edi,%ebp
- addl 8(%r13),%esi
- andnl %ecx,%eax,%edi
- addl %ebp,%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- andl %ebx,%eax
- addl %r12d,%esi
- xorl %edi,%eax
- addl 12(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl 32(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl 36(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl 40(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl 44(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl 64(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- vmovdqu -64(%r14),%ymm11
- vpshufb %ymm6,%ymm0,%ymm0
- addl 68(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl 72(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl 76(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl 96(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl 100(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- vpshufb %ymm6,%ymm1,%ymm1
- vpaddd %ymm11,%ymm0,%ymm8
- addl 104(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl 108(%r13),%edx
- leaq 256(%r13),%r13
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl -128(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -124(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -120(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- vmovdqu %ymm8,0(%rsp)
- vpshufb %ymm6,%ymm2,%ymm2
- vpaddd %ymm11,%ymm1,%ymm9
- addl -116(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -96(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl -92(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- addl -88(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -84(%r13),%ebx
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- andl %edi,%ecx
- vmovdqu %ymm9,32(%rsp)
- vpshufb %ymm6,%ymm3,%ymm3
- vpaddd %ymm11,%ymm2,%ymm6
- addl -64(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl -60(%r13),%eax
- xorl %edx,%ebx
- movl %ecx,%edi
- xorl %edx,%edi
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- andl %edi,%ebp
- addl -56(%r13),%esi
- xorl %ecx,%ebp
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- addl -52(%r13),%edx
- xorl %ebx,%eax
- movl %ebp,%edi
- xorl %ebx,%edi
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- andl %edi,%esi
- addl -32(%r13),%ecx
- xorl %ebp,%esi
- movl %eax,%edi
- xorl %ebp,%edi
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- andl %edi,%edx
- jmp .Lalign32_3
-.align 32
-.Lalign32_3:
- vmovdqu %ymm6,64(%rsp)
- vpaddd %ymm11,%ymm3,%ymm7
- addl -28(%r13),%ebx
- xorl %eax,%edx
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- andl %edi,%ecx
- addl -24(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl -20(%r13),%eax
- xorl %edx,%ebx
- movl %ecx,%edi
- xorl %edx,%edi
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- andl %edi,%ebp
- addl 0(%r13),%esi
- xorl %ecx,%ebp
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- addl 4(%r13),%edx
- xorl %ebx,%eax
- movl %ebp,%edi
- xorl %ebx,%edi
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- andl %edi,%esi
- vmovdqu %ymm7,96(%rsp)
- addl 8(%r13),%ecx
- xorl %ebp,%esi
- movl %eax,%edi
- xorl %ebp,%edi
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- andl %edi,%edx
- addl 12(%r13),%ebx
- xorl %eax,%edx
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- andl %edi,%ecx
- addl 32(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl 36(%r13),%eax
- xorl %edx,%ebx
- movl %ecx,%edi
- xorl %edx,%edi
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- andl %edi,%ebp
- addl 40(%r13),%esi
- xorl %ecx,%ebp
- movl %ebx,%edi
- xorl %ecx,%edi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- andl %edi,%eax
- vpalignr $8,%ymm0,%ymm1,%ymm4
- addl 44(%r13),%edx
- xorl %ebx,%eax
- movl %ebp,%edi
- xorl %ebx,%edi
- vpsrldq $4,%ymm3,%ymm8
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- vpxor %ymm0,%ymm4,%ymm4
- vpxor %ymm2,%ymm8,%ymm8
- xorl %ebp,%esi
- addl %r12d,%edx
- vpxor %ymm8,%ymm4,%ymm4
- andl %edi,%esi
- addl 64(%r13),%ecx
- xorl %ebp,%esi
- movl %eax,%edi
- vpsrld $31,%ymm4,%ymm8
- xorl %ebp,%edi
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- vpslldq $12,%ymm4,%ymm10
- vpaddd %ymm4,%ymm4,%ymm4
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm4,%ymm4
- addl %r12d,%ecx
- andl %edi,%edx
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm4,%ymm4
- addl 68(%r13),%ebx
- xorl %eax,%edx
- vpxor %ymm10,%ymm4,%ymm4
- movl %esi,%edi
- xorl %eax,%edi
- leal (%rbx,%rdx,1),%ebx
- vpaddd %ymm11,%ymm4,%ymm9
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- vmovdqu %ymm9,128(%rsp)
- addl %r12d,%ebx
- andl %edi,%ecx
- addl 72(%r13),%ebp
- xorl %esi,%ecx
- movl %edx,%edi
- xorl %esi,%edi
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- andl %edi,%ebx
- addl 76(%r13),%eax
- xorl %edx,%ebx
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- vpalignr $8,%ymm1,%ymm2,%ymm5
- addl 96(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- vpsrldq $4,%ymm4,%ymm8
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- vpxor %ymm1,%ymm5,%ymm5
- vpxor %ymm3,%ymm8,%ymm8
- addl 100(%r13),%edx
- leal (%rdx,%rax,1),%edx
- vpxor %ymm8,%ymm5,%ymm5
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- xorl %ebp,%esi
- addl %r12d,%edx
- vpsrld $31,%ymm5,%ymm8
- vmovdqu -32(%r14),%ymm11
- xorl %ebx,%esi
- addl 104(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- vpslldq $12,%ymm5,%ymm10
- vpaddd %ymm5,%ymm5,%ymm5
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm5,%ymm5
- xorl %eax,%edx
- addl %r12d,%ecx
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm5,%ymm5
- xorl %ebp,%edx
- addl 108(%r13),%ebx
- leaq 256(%r13),%r13
- vpxor %ymm10,%ymm5,%ymm5
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- vpaddd %ymm11,%ymm5,%ymm9
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- vmovdqu %ymm9,160(%rsp)
- addl -128(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- vpalignr $8,%ymm2,%ymm3,%ymm6
- addl -124(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- vpsrldq $4,%ymm5,%ymm8
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- vpxor %ymm2,%ymm6,%ymm6
- vpxor %ymm4,%ymm8,%ymm8
- addl -120(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- vpxor %ymm8,%ymm6,%ymm6
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- vpsrld $31,%ymm6,%ymm8
- xorl %ecx,%eax
- addl -116(%r13),%edx
- leal (%rdx,%rax,1),%edx
- vpslldq $12,%ymm6,%ymm10
- vpaddd %ymm6,%ymm6,%ymm6
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm6,%ymm6
- xorl %ebp,%esi
- addl %r12d,%edx
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm6,%ymm6
- xorl %ebx,%esi
- addl -96(%r13),%ecx
- vpxor %ymm10,%ymm6,%ymm6
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- vpaddd %ymm11,%ymm6,%ymm9
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- vmovdqu %ymm9,192(%rsp)
- addl -92(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- vpalignr $8,%ymm3,%ymm4,%ymm7
- addl -88(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- vpsrldq $4,%ymm6,%ymm8
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- vpxor %ymm3,%ymm7,%ymm7
- vpxor %ymm5,%ymm8,%ymm8
- addl -84(%r13),%eax
- leal (%rax,%rbx,1),%eax
- vpxor %ymm8,%ymm7,%ymm7
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- vpsrld $31,%ymm7,%ymm8
- xorl %edx,%ebp
- addl -64(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- vpslldq $12,%ymm7,%ymm10
- vpaddd %ymm7,%ymm7,%ymm7
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- vpsrld $30,%ymm10,%ymm9
- vpor %ymm8,%ymm7,%ymm7
- xorl %ebx,%eax
- addl %r12d,%esi
- vpslld $2,%ymm10,%ymm10
- vpxor %ymm9,%ymm7,%ymm7
- xorl %ecx,%eax
- addl -60(%r13),%edx
- vpxor %ymm10,%ymm7,%ymm7
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- rorxl $2,%esi,%eax
- vpaddd %ymm11,%ymm7,%ymm9
- xorl %ebp,%esi
- addl %r12d,%edx
- xorl %ebx,%esi
- vmovdqu %ymm9,224(%rsp)
- addl -56(%r13),%ecx
- leal (%rcx,%rsi,1),%ecx
- rorxl $27,%edx,%r12d
- rorxl $2,%edx,%esi
- xorl %eax,%edx
- addl %r12d,%ecx
- xorl %ebp,%edx
- addl -52(%r13),%ebx
- leal (%rbx,%rdx,1),%ebx
- rorxl $27,%ecx,%r12d
- rorxl $2,%ecx,%edx
- xorl %esi,%ecx
- addl %r12d,%ebx
- xorl %eax,%ecx
- addl -32(%r13),%ebp
- leal (%rcx,%rbp,1),%ebp
- rorxl $27,%ebx,%r12d
- rorxl $2,%ebx,%ecx
- xorl %edx,%ebx
- addl %r12d,%ebp
- xorl %esi,%ebx
- addl -28(%r13),%eax
- leal (%rax,%rbx,1),%eax
- rorxl $27,%ebp,%r12d
- rorxl $2,%ebp,%ebx
- xorl %ecx,%ebp
- addl %r12d,%eax
- xorl %edx,%ebp
- addl -24(%r13),%esi
- leal (%rsi,%rbp,1),%esi
- rorxl $27,%eax,%r12d
- rorxl $2,%eax,%ebp
- xorl %ebx,%eax
- addl %r12d,%esi
- xorl %ecx,%eax
- addl -20(%r13),%edx
- leal (%rdx,%rax,1),%edx
- rorxl $27,%esi,%r12d
- addl %r12d,%edx
- leaq 128(%rsp),%r13
-
-
- addl 0(%r8),%edx
- addl 4(%r8),%esi
- addl 8(%r8),%ebp
- movl %edx,0(%r8)
- addl 12(%r8),%ebx
- movl %esi,4(%r8)
- movl %edx,%eax
- addl 16(%r8),%ecx
- movl %ebp,%r12d
- movl %ebp,8(%r8)
- movl %ebx,%edx
-
- movl %ebx,12(%r8)
- movl %esi,%ebp
- movl %ecx,16(%r8)
-
- movl %ecx,%esi
- movl %r12d,%ecx
-
-
- cmpq %r10,%r9
- jbe .Loop_avx2
-
-.Ldone_avx2:
- vzeroupper
- movq -40(%r11),%r14
-.cfi_restore %r14
- movq -32(%r11),%r13
-.cfi_restore %r13
- movq -24(%r11),%r12
-.cfi_restore %r12
- movq -16(%r11),%rbp
-.cfi_restore %rbp
- movq -8(%r11),%rbx
-.cfi_restore %rbx
- leaq (%r11),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx2:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha1_block_data_order_avx2,.-sha1_block_data_order_avx2
.align 64
K_XX_XX:
.long 0x5a827999,0x5a827999,0x5a827999,0x5a827999
diff --git a/secure/lib/libcrypto/amd64/sha256-mb-x86_64.S b/secure/lib/libcrypto/amd64/sha256-mb-x86_64.S
index 1c77e3d13a8b4..63dca42029eac 100644
--- a/secure/lib/libcrypto/amd64/sha256-mb-x86_64.S
+++ b/secure/lib/libcrypto/amd64/sha256-mb-x86_64.S
@@ -12,8 +12,6 @@ sha256_multi_block:
movq OPENSSL_ia32cap_P+4(%rip),%rcx
btq $61,%rcx
jc _shaext_shortcut
- testl $268435456,%ecx
- jnz _avx_shortcut
movq %rsp,%rax
.cfi_def_cfa_register %rax
pushq %rbx
@@ -3127,4676 +3125,6 @@ _shaext_shortcut:
.byte 0xf3,0xc3
.cfi_endproc
.size sha256_multi_block_shaext,.-sha256_multi_block_shaext
-.type sha256_multi_block_avx,@function
-.align 32
-sha256_multi_block_avx:
-.cfi_startproc
-_avx_shortcut:
- shrq $32,%rcx
- cmpl $2,%edx
- jb .Lavx
- testl $32,%ecx
- jnz _avx2_shortcut
- jmp .Lavx
-.align 32
-.Lavx:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- subq $288,%rsp
- andq $-256,%rsp
- movq %rax,272(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0x90,0x02,0x06,0x23,0x08
-.Lbody_avx:
- leaq K256+128(%rip),%rbp
- leaq 256(%rsp),%rbx
- leaq 128(%rdi),%rdi
-
-.Loop_grande_avx:
- movl %edx,280(%rsp)
- xorl %edx,%edx
- movq 0(%rsi),%r8
- movl 8(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,0(%rbx)
- cmovleq %rbp,%r8
- movq 16(%rsi),%r9
- movl 24(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,4(%rbx)
- cmovleq %rbp,%r9
- movq 32(%rsi),%r10
- movl 40(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,8(%rbx)
- cmovleq %rbp,%r10
- movq 48(%rsi),%r11
- movl 56(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,12(%rbx)
- cmovleq %rbp,%r11
- testl %edx,%edx
- jz .Ldone_avx
-
- vmovdqu 0-128(%rdi),%xmm8
- leaq 128(%rsp),%rax
- vmovdqu 32-128(%rdi),%xmm9
- vmovdqu 64-128(%rdi),%xmm10
- vmovdqu 96-128(%rdi),%xmm11
- vmovdqu 128-128(%rdi),%xmm12
- vmovdqu 160-128(%rdi),%xmm13
- vmovdqu 192-128(%rdi),%xmm14
- vmovdqu 224-128(%rdi),%xmm15
- vmovdqu .Lpbswap(%rip),%xmm6
- jmp .Loop_avx
-
-.align 32
-.Loop_avx:
- vpxor %xmm9,%xmm10,%xmm4
- vmovd 0(%r8),%xmm5
- vmovd 0(%r9),%xmm0
- vpinsrd $1,0(%r10),%xmm5,%xmm5
- vpinsrd $1,0(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm12,%xmm7
- vpslld $26,%xmm12,%xmm2
- vmovdqu %xmm5,0-128(%rax)
- vpaddd %xmm15,%xmm5,%xmm5
-
- vpsrld $11,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm12,%xmm2
- vpaddd -128(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm12,%xmm2
- vpandn %xmm14,%xmm12,%xmm0
- vpand %xmm13,%xmm12,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm8,%xmm15
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm8,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm8,%xmm9,%xmm3
-
- vpxor %xmm1,%xmm15,%xmm15
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm8,%xmm1
-
- vpslld $19,%xmm8,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm15,%xmm7
-
- vpsrld $22,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm8,%xmm2
- vpxor %xmm4,%xmm9,%xmm15
- vpaddd %xmm5,%xmm11,%xmm11
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm15,%xmm15
- vpaddd %xmm7,%xmm15,%xmm15
- vmovd 4(%r8),%xmm5
- vmovd 4(%r9),%xmm0
- vpinsrd $1,4(%r10),%xmm5,%xmm5
- vpinsrd $1,4(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm11,%xmm7
- vpslld $26,%xmm11,%xmm2
- vmovdqu %xmm5,16-128(%rax)
- vpaddd %xmm14,%xmm5,%xmm5
-
- vpsrld $11,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm11,%xmm2
- vpaddd -96(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm11,%xmm2
- vpandn %xmm13,%xmm11,%xmm0
- vpand %xmm12,%xmm11,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm15,%xmm14
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm15,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm15,%xmm8,%xmm4
-
- vpxor %xmm1,%xmm14,%xmm14
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm15,%xmm1
-
- vpslld $19,%xmm15,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm14,%xmm7
-
- vpsrld $22,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm15,%xmm2
- vpxor %xmm3,%xmm8,%xmm14
- vpaddd %xmm5,%xmm10,%xmm10
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm14,%xmm14
- vpaddd %xmm7,%xmm14,%xmm14
- vmovd 8(%r8),%xmm5
- vmovd 8(%r9),%xmm0
- vpinsrd $1,8(%r10),%xmm5,%xmm5
- vpinsrd $1,8(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm10,%xmm7
- vpslld $26,%xmm10,%xmm2
- vmovdqu %xmm5,32-128(%rax)
- vpaddd %xmm13,%xmm5,%xmm5
-
- vpsrld $11,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm10,%xmm2
- vpaddd -64(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm10,%xmm2
- vpandn %xmm12,%xmm10,%xmm0
- vpand %xmm11,%xmm10,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm14,%xmm13
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm14,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm14,%xmm15,%xmm3
-
- vpxor %xmm1,%xmm13,%xmm13
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm14,%xmm1
-
- vpslld $19,%xmm14,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm13,%xmm7
-
- vpsrld $22,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm14,%xmm2
- vpxor %xmm4,%xmm15,%xmm13
- vpaddd %xmm5,%xmm9,%xmm9
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm13,%xmm13
- vpaddd %xmm7,%xmm13,%xmm13
- vmovd 12(%r8),%xmm5
- vmovd 12(%r9),%xmm0
- vpinsrd $1,12(%r10),%xmm5,%xmm5
- vpinsrd $1,12(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm9,%xmm7
- vpslld $26,%xmm9,%xmm2
- vmovdqu %xmm5,48-128(%rax)
- vpaddd %xmm12,%xmm5,%xmm5
-
- vpsrld $11,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm9,%xmm2
- vpaddd -32(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm9,%xmm2
- vpandn %xmm11,%xmm9,%xmm0
- vpand %xmm10,%xmm9,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm13,%xmm12
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm13,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm13,%xmm14,%xmm4
-
- vpxor %xmm1,%xmm12,%xmm12
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm13,%xmm1
-
- vpslld $19,%xmm13,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm12,%xmm7
-
- vpsrld $22,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm13,%xmm2
- vpxor %xmm3,%xmm14,%xmm12
- vpaddd %xmm5,%xmm8,%xmm8
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm12,%xmm12
- vpaddd %xmm7,%xmm12,%xmm12
- vmovd 16(%r8),%xmm5
- vmovd 16(%r9),%xmm0
- vpinsrd $1,16(%r10),%xmm5,%xmm5
- vpinsrd $1,16(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm8,%xmm7
- vpslld $26,%xmm8,%xmm2
- vmovdqu %xmm5,64-128(%rax)
- vpaddd %xmm11,%xmm5,%xmm5
-
- vpsrld $11,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm8,%xmm2
- vpaddd 0(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm8,%xmm2
- vpandn %xmm10,%xmm8,%xmm0
- vpand %xmm9,%xmm8,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm12,%xmm11
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm12,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm12,%xmm13,%xmm3
-
- vpxor %xmm1,%xmm11,%xmm11
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm12,%xmm1
-
- vpslld $19,%xmm12,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm11,%xmm7
-
- vpsrld $22,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm12,%xmm2
- vpxor %xmm4,%xmm13,%xmm11
- vpaddd %xmm5,%xmm15,%xmm15
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm11,%xmm11
- vpaddd %xmm7,%xmm11,%xmm11
- vmovd 20(%r8),%xmm5
- vmovd 20(%r9),%xmm0
- vpinsrd $1,20(%r10),%xmm5,%xmm5
- vpinsrd $1,20(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm15,%xmm7
- vpslld $26,%xmm15,%xmm2
- vmovdqu %xmm5,80-128(%rax)
- vpaddd %xmm10,%xmm5,%xmm5
-
- vpsrld $11,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm15,%xmm2
- vpaddd 32(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm15,%xmm2
- vpandn %xmm9,%xmm15,%xmm0
- vpand %xmm8,%xmm15,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm11,%xmm10
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm11,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm11,%xmm12,%xmm4
-
- vpxor %xmm1,%xmm10,%xmm10
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm11,%xmm1
-
- vpslld $19,%xmm11,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm10,%xmm7
-
- vpsrld $22,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm11,%xmm2
- vpxor %xmm3,%xmm12,%xmm10
- vpaddd %xmm5,%xmm14,%xmm14
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm10,%xmm10
- vpaddd %xmm7,%xmm10,%xmm10
- vmovd 24(%r8),%xmm5
- vmovd 24(%r9),%xmm0
- vpinsrd $1,24(%r10),%xmm5,%xmm5
- vpinsrd $1,24(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm14,%xmm7
- vpslld $26,%xmm14,%xmm2
- vmovdqu %xmm5,96-128(%rax)
- vpaddd %xmm9,%xmm5,%xmm5
-
- vpsrld $11,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm14,%xmm2
- vpaddd 64(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm14,%xmm2
- vpandn %xmm8,%xmm14,%xmm0
- vpand %xmm15,%xmm14,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm10,%xmm9
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm10,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm10,%xmm11,%xmm3
-
- vpxor %xmm1,%xmm9,%xmm9
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm10,%xmm1
-
- vpslld $19,%xmm10,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm9,%xmm7
-
- vpsrld $22,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm10,%xmm2
- vpxor %xmm4,%xmm11,%xmm9
- vpaddd %xmm5,%xmm13,%xmm13
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm9,%xmm9
- vpaddd %xmm7,%xmm9,%xmm9
- vmovd 28(%r8),%xmm5
- vmovd 28(%r9),%xmm0
- vpinsrd $1,28(%r10),%xmm5,%xmm5
- vpinsrd $1,28(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm13,%xmm7
- vpslld $26,%xmm13,%xmm2
- vmovdqu %xmm5,112-128(%rax)
- vpaddd %xmm8,%xmm5,%xmm5
-
- vpsrld $11,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm13,%xmm2
- vpaddd 96(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm13,%xmm2
- vpandn %xmm15,%xmm13,%xmm0
- vpand %xmm14,%xmm13,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm9,%xmm8
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm9,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm9,%xmm10,%xmm4
-
- vpxor %xmm1,%xmm8,%xmm8
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm9,%xmm1
-
- vpslld $19,%xmm9,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm8,%xmm7
-
- vpsrld $22,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm9,%xmm2
- vpxor %xmm3,%xmm10,%xmm8
- vpaddd %xmm5,%xmm12,%xmm12
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm8,%xmm8
- vpaddd %xmm7,%xmm8,%xmm8
- addq $256,%rbp
- vmovd 32(%r8),%xmm5
- vmovd 32(%r9),%xmm0
- vpinsrd $1,32(%r10),%xmm5,%xmm5
- vpinsrd $1,32(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm12,%xmm7
- vpslld $26,%xmm12,%xmm2
- vmovdqu %xmm5,128-128(%rax)
- vpaddd %xmm15,%xmm5,%xmm5
-
- vpsrld $11,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm12,%xmm2
- vpaddd -128(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm12,%xmm2
- vpandn %xmm14,%xmm12,%xmm0
- vpand %xmm13,%xmm12,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm8,%xmm15
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm8,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm8,%xmm9,%xmm3
-
- vpxor %xmm1,%xmm15,%xmm15
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm8,%xmm1
-
- vpslld $19,%xmm8,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm15,%xmm7
-
- vpsrld $22,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm8,%xmm2
- vpxor %xmm4,%xmm9,%xmm15
- vpaddd %xmm5,%xmm11,%xmm11
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm15,%xmm15
- vpaddd %xmm7,%xmm15,%xmm15
- vmovd 36(%r8),%xmm5
- vmovd 36(%r9),%xmm0
- vpinsrd $1,36(%r10),%xmm5,%xmm5
- vpinsrd $1,36(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm11,%xmm7
- vpslld $26,%xmm11,%xmm2
- vmovdqu %xmm5,144-128(%rax)
- vpaddd %xmm14,%xmm5,%xmm5
-
- vpsrld $11,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm11,%xmm2
- vpaddd -96(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm11,%xmm2
- vpandn %xmm13,%xmm11,%xmm0
- vpand %xmm12,%xmm11,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm15,%xmm14
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm15,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm15,%xmm8,%xmm4
-
- vpxor %xmm1,%xmm14,%xmm14
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm15,%xmm1
-
- vpslld $19,%xmm15,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm14,%xmm7
-
- vpsrld $22,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm15,%xmm2
- vpxor %xmm3,%xmm8,%xmm14
- vpaddd %xmm5,%xmm10,%xmm10
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm14,%xmm14
- vpaddd %xmm7,%xmm14,%xmm14
- vmovd 40(%r8),%xmm5
- vmovd 40(%r9),%xmm0
- vpinsrd $1,40(%r10),%xmm5,%xmm5
- vpinsrd $1,40(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm10,%xmm7
- vpslld $26,%xmm10,%xmm2
- vmovdqu %xmm5,160-128(%rax)
- vpaddd %xmm13,%xmm5,%xmm5
-
- vpsrld $11,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm10,%xmm2
- vpaddd -64(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm10,%xmm2
- vpandn %xmm12,%xmm10,%xmm0
- vpand %xmm11,%xmm10,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm14,%xmm13
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm14,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm14,%xmm15,%xmm3
-
- vpxor %xmm1,%xmm13,%xmm13
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm14,%xmm1
-
- vpslld $19,%xmm14,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm13,%xmm7
-
- vpsrld $22,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm14,%xmm2
- vpxor %xmm4,%xmm15,%xmm13
- vpaddd %xmm5,%xmm9,%xmm9
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm13,%xmm13
- vpaddd %xmm7,%xmm13,%xmm13
- vmovd 44(%r8),%xmm5
- vmovd 44(%r9),%xmm0
- vpinsrd $1,44(%r10),%xmm5,%xmm5
- vpinsrd $1,44(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm9,%xmm7
- vpslld $26,%xmm9,%xmm2
- vmovdqu %xmm5,176-128(%rax)
- vpaddd %xmm12,%xmm5,%xmm5
-
- vpsrld $11,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm9,%xmm2
- vpaddd -32(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm9,%xmm2
- vpandn %xmm11,%xmm9,%xmm0
- vpand %xmm10,%xmm9,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm13,%xmm12
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm13,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm13,%xmm14,%xmm4
-
- vpxor %xmm1,%xmm12,%xmm12
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm13,%xmm1
-
- vpslld $19,%xmm13,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm12,%xmm7
-
- vpsrld $22,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm13,%xmm2
- vpxor %xmm3,%xmm14,%xmm12
- vpaddd %xmm5,%xmm8,%xmm8
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm12,%xmm12
- vpaddd %xmm7,%xmm12,%xmm12
- vmovd 48(%r8),%xmm5
- vmovd 48(%r9),%xmm0
- vpinsrd $1,48(%r10),%xmm5,%xmm5
- vpinsrd $1,48(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm8,%xmm7
- vpslld $26,%xmm8,%xmm2
- vmovdqu %xmm5,192-128(%rax)
- vpaddd %xmm11,%xmm5,%xmm5
-
- vpsrld $11,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm8,%xmm2
- vpaddd 0(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm8,%xmm2
- vpandn %xmm10,%xmm8,%xmm0
- vpand %xmm9,%xmm8,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm12,%xmm11
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm12,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm12,%xmm13,%xmm3
-
- vpxor %xmm1,%xmm11,%xmm11
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm12,%xmm1
-
- vpslld $19,%xmm12,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm11,%xmm7
-
- vpsrld $22,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm12,%xmm2
- vpxor %xmm4,%xmm13,%xmm11
- vpaddd %xmm5,%xmm15,%xmm15
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm11,%xmm11
- vpaddd %xmm7,%xmm11,%xmm11
- vmovd 52(%r8),%xmm5
- vmovd 52(%r9),%xmm0
- vpinsrd $1,52(%r10),%xmm5,%xmm5
- vpinsrd $1,52(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm15,%xmm7
- vpslld $26,%xmm15,%xmm2
- vmovdqu %xmm5,208-128(%rax)
- vpaddd %xmm10,%xmm5,%xmm5
-
- vpsrld $11,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm15,%xmm2
- vpaddd 32(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm15,%xmm2
- vpandn %xmm9,%xmm15,%xmm0
- vpand %xmm8,%xmm15,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm11,%xmm10
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm11,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm11,%xmm12,%xmm4
-
- vpxor %xmm1,%xmm10,%xmm10
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm11,%xmm1
-
- vpslld $19,%xmm11,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm10,%xmm7
-
- vpsrld $22,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm11,%xmm2
- vpxor %xmm3,%xmm12,%xmm10
- vpaddd %xmm5,%xmm14,%xmm14
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm10,%xmm10
- vpaddd %xmm7,%xmm10,%xmm10
- vmovd 56(%r8),%xmm5
- vmovd 56(%r9),%xmm0
- vpinsrd $1,56(%r10),%xmm5,%xmm5
- vpinsrd $1,56(%r11),%xmm0,%xmm0
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm14,%xmm7
- vpslld $26,%xmm14,%xmm2
- vmovdqu %xmm5,224-128(%rax)
- vpaddd %xmm9,%xmm5,%xmm5
-
- vpsrld $11,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm14,%xmm2
- vpaddd 64(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm14,%xmm2
- vpandn %xmm8,%xmm14,%xmm0
- vpand %xmm15,%xmm14,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm10,%xmm9
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm10,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm10,%xmm11,%xmm3
-
- vpxor %xmm1,%xmm9,%xmm9
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm10,%xmm1
-
- vpslld $19,%xmm10,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm9,%xmm7
-
- vpsrld $22,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm10,%xmm2
- vpxor %xmm4,%xmm11,%xmm9
- vpaddd %xmm5,%xmm13,%xmm13
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm9,%xmm9
- vpaddd %xmm7,%xmm9,%xmm9
- vmovd 60(%r8),%xmm5
- leaq 64(%r8),%r8
- vmovd 60(%r9),%xmm0
- leaq 64(%r9),%r9
- vpinsrd $1,60(%r10),%xmm5,%xmm5
- leaq 64(%r10),%r10
- vpinsrd $1,60(%r11),%xmm0,%xmm0
- leaq 64(%r11),%r11
- vpunpckldq %xmm0,%xmm5,%xmm5
- vpshufb %xmm6,%xmm5,%xmm5
- vpsrld $6,%xmm13,%xmm7
- vpslld $26,%xmm13,%xmm2
- vmovdqu %xmm5,240-128(%rax)
- vpaddd %xmm8,%xmm5,%xmm5
-
- vpsrld $11,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm13,%xmm2
- vpaddd 96(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- prefetcht0 63(%r8)
- vpslld $7,%xmm13,%xmm2
- vpandn %xmm15,%xmm13,%xmm0
- vpand %xmm14,%xmm13,%xmm4
- prefetcht0 63(%r9)
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm9,%xmm8
- vpxor %xmm2,%xmm7,%xmm7
- prefetcht0 63(%r10)
- vpslld $30,%xmm9,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm9,%xmm10,%xmm4
- prefetcht0 63(%r11)
- vpxor %xmm1,%xmm8,%xmm8
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm9,%xmm1
-
- vpslld $19,%xmm9,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm8,%xmm7
-
- vpsrld $22,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm9,%xmm2
- vpxor %xmm3,%xmm10,%xmm8
- vpaddd %xmm5,%xmm12,%xmm12
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm8,%xmm8
- vpaddd %xmm7,%xmm8,%xmm8
- addq $256,%rbp
- vmovdqu 0-128(%rax),%xmm5
- movl $3,%ecx
- jmp .Loop_16_xx_avx
-.align 32
-.Loop_16_xx_avx:
- vmovdqu 16-128(%rax),%xmm6
- vpaddd 144-128(%rax),%xmm5,%xmm5
-
- vpsrld $3,%xmm6,%xmm7
- vpsrld $7,%xmm6,%xmm1
- vpslld $25,%xmm6,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm6,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm6,%xmm2
- vmovdqu 224-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm1,%xmm3,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm5,%xmm5
- vpsrld $6,%xmm12,%xmm7
- vpslld $26,%xmm12,%xmm2
- vmovdqu %xmm5,0-128(%rax)
- vpaddd %xmm15,%xmm5,%xmm5
-
- vpsrld $11,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm12,%xmm2
- vpaddd -128(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm12,%xmm2
- vpandn %xmm14,%xmm12,%xmm0
- vpand %xmm13,%xmm12,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm8,%xmm15
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm8,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm8,%xmm9,%xmm3
-
- vpxor %xmm1,%xmm15,%xmm15
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm8,%xmm1
-
- vpslld $19,%xmm8,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm15,%xmm7
-
- vpsrld $22,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm8,%xmm2
- vpxor %xmm4,%xmm9,%xmm15
- vpaddd %xmm5,%xmm11,%xmm11
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm15,%xmm15
- vpaddd %xmm7,%xmm15,%xmm15
- vmovdqu 32-128(%rax),%xmm5
- vpaddd 160-128(%rax),%xmm6,%xmm6
-
- vpsrld $3,%xmm5,%xmm7
- vpsrld $7,%xmm5,%xmm1
- vpslld $25,%xmm5,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm5,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm5,%xmm2
- vmovdqu 240-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm6,%xmm6
- vpsrld $6,%xmm11,%xmm7
- vpslld $26,%xmm11,%xmm2
- vmovdqu %xmm6,16-128(%rax)
- vpaddd %xmm14,%xmm6,%xmm6
-
- vpsrld $11,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm11,%xmm2
- vpaddd -96(%rbp),%xmm6,%xmm6
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm11,%xmm2
- vpandn %xmm13,%xmm11,%xmm0
- vpand %xmm12,%xmm11,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm15,%xmm14
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm15,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm15,%xmm8,%xmm4
-
- vpxor %xmm1,%xmm14,%xmm14
- vpaddd %xmm7,%xmm6,%xmm6
-
- vpsrld $13,%xmm15,%xmm1
-
- vpslld $19,%xmm15,%xmm2
- vpaddd %xmm0,%xmm6,%xmm6
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm14,%xmm7
-
- vpsrld $22,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm15,%xmm2
- vpxor %xmm3,%xmm8,%xmm14
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm6,%xmm14,%xmm14
- vpaddd %xmm7,%xmm14,%xmm14
- vmovdqu 48-128(%rax),%xmm6
- vpaddd 176-128(%rax),%xmm5,%xmm5
-
- vpsrld $3,%xmm6,%xmm7
- vpsrld $7,%xmm6,%xmm1
- vpslld $25,%xmm6,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm6,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm6,%xmm2
- vmovdqu 0-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm1,%xmm3,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm5,%xmm5
- vpsrld $6,%xmm10,%xmm7
- vpslld $26,%xmm10,%xmm2
- vmovdqu %xmm5,32-128(%rax)
- vpaddd %xmm13,%xmm5,%xmm5
-
- vpsrld $11,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm10,%xmm2
- vpaddd -64(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm10,%xmm2
- vpandn %xmm12,%xmm10,%xmm0
- vpand %xmm11,%xmm10,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm14,%xmm13
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm14,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm14,%xmm15,%xmm3
-
- vpxor %xmm1,%xmm13,%xmm13
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm14,%xmm1
-
- vpslld $19,%xmm14,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm13,%xmm7
-
- vpsrld $22,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm14,%xmm2
- vpxor %xmm4,%xmm15,%xmm13
- vpaddd %xmm5,%xmm9,%xmm9
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm13,%xmm13
- vpaddd %xmm7,%xmm13,%xmm13
- vmovdqu 64-128(%rax),%xmm5
- vpaddd 192-128(%rax),%xmm6,%xmm6
-
- vpsrld $3,%xmm5,%xmm7
- vpsrld $7,%xmm5,%xmm1
- vpslld $25,%xmm5,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm5,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm5,%xmm2
- vmovdqu 16-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm6,%xmm6
- vpsrld $6,%xmm9,%xmm7
- vpslld $26,%xmm9,%xmm2
- vmovdqu %xmm6,48-128(%rax)
- vpaddd %xmm12,%xmm6,%xmm6
-
- vpsrld $11,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm9,%xmm2
- vpaddd -32(%rbp),%xmm6,%xmm6
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm9,%xmm2
- vpandn %xmm11,%xmm9,%xmm0
- vpand %xmm10,%xmm9,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm13,%xmm12
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm13,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm13,%xmm14,%xmm4
-
- vpxor %xmm1,%xmm12,%xmm12
- vpaddd %xmm7,%xmm6,%xmm6
-
- vpsrld $13,%xmm13,%xmm1
-
- vpslld $19,%xmm13,%xmm2
- vpaddd %xmm0,%xmm6,%xmm6
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm12,%xmm7
-
- vpsrld $22,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm13,%xmm2
- vpxor %xmm3,%xmm14,%xmm12
- vpaddd %xmm6,%xmm8,%xmm8
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm6,%xmm12,%xmm12
- vpaddd %xmm7,%xmm12,%xmm12
- vmovdqu 80-128(%rax),%xmm6
- vpaddd 208-128(%rax),%xmm5,%xmm5
-
- vpsrld $3,%xmm6,%xmm7
- vpsrld $7,%xmm6,%xmm1
- vpslld $25,%xmm6,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm6,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm6,%xmm2
- vmovdqu 32-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm1,%xmm3,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm5,%xmm5
- vpsrld $6,%xmm8,%xmm7
- vpslld $26,%xmm8,%xmm2
- vmovdqu %xmm5,64-128(%rax)
- vpaddd %xmm11,%xmm5,%xmm5
-
- vpsrld $11,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm8,%xmm2
- vpaddd 0(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm8,%xmm2
- vpandn %xmm10,%xmm8,%xmm0
- vpand %xmm9,%xmm8,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm12,%xmm11
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm12,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm12,%xmm13,%xmm3
-
- vpxor %xmm1,%xmm11,%xmm11
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm12,%xmm1
-
- vpslld $19,%xmm12,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm11,%xmm7
-
- vpsrld $22,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm12,%xmm2
- vpxor %xmm4,%xmm13,%xmm11
- vpaddd %xmm5,%xmm15,%xmm15
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm11,%xmm11
- vpaddd %xmm7,%xmm11,%xmm11
- vmovdqu 96-128(%rax),%xmm5
- vpaddd 224-128(%rax),%xmm6,%xmm6
-
- vpsrld $3,%xmm5,%xmm7
- vpsrld $7,%xmm5,%xmm1
- vpslld $25,%xmm5,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm5,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm5,%xmm2
- vmovdqu 48-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm6,%xmm6
- vpsrld $6,%xmm15,%xmm7
- vpslld $26,%xmm15,%xmm2
- vmovdqu %xmm6,80-128(%rax)
- vpaddd %xmm10,%xmm6,%xmm6
-
- vpsrld $11,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm15,%xmm2
- vpaddd 32(%rbp),%xmm6,%xmm6
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm15,%xmm2
- vpandn %xmm9,%xmm15,%xmm0
- vpand %xmm8,%xmm15,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm11,%xmm10
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm11,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm11,%xmm12,%xmm4
-
- vpxor %xmm1,%xmm10,%xmm10
- vpaddd %xmm7,%xmm6,%xmm6
-
- vpsrld $13,%xmm11,%xmm1
-
- vpslld $19,%xmm11,%xmm2
- vpaddd %xmm0,%xmm6,%xmm6
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm10,%xmm7
-
- vpsrld $22,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm11,%xmm2
- vpxor %xmm3,%xmm12,%xmm10
- vpaddd %xmm6,%xmm14,%xmm14
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm6,%xmm10,%xmm10
- vpaddd %xmm7,%xmm10,%xmm10
- vmovdqu 112-128(%rax),%xmm6
- vpaddd 240-128(%rax),%xmm5,%xmm5
-
- vpsrld $3,%xmm6,%xmm7
- vpsrld $7,%xmm6,%xmm1
- vpslld $25,%xmm6,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm6,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm6,%xmm2
- vmovdqu 64-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm1,%xmm3,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm5,%xmm5
- vpsrld $6,%xmm14,%xmm7
- vpslld $26,%xmm14,%xmm2
- vmovdqu %xmm5,96-128(%rax)
- vpaddd %xmm9,%xmm5,%xmm5
-
- vpsrld $11,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm14,%xmm2
- vpaddd 64(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm14,%xmm2
- vpandn %xmm8,%xmm14,%xmm0
- vpand %xmm15,%xmm14,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm10,%xmm9
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm10,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm10,%xmm11,%xmm3
-
- vpxor %xmm1,%xmm9,%xmm9
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm10,%xmm1
-
- vpslld $19,%xmm10,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm9,%xmm7
-
- vpsrld $22,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm10,%xmm2
- vpxor %xmm4,%xmm11,%xmm9
- vpaddd %xmm5,%xmm13,%xmm13
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm9,%xmm9
- vpaddd %xmm7,%xmm9,%xmm9
- vmovdqu 128-128(%rax),%xmm5
- vpaddd 0-128(%rax),%xmm6,%xmm6
-
- vpsrld $3,%xmm5,%xmm7
- vpsrld $7,%xmm5,%xmm1
- vpslld $25,%xmm5,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm5,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm5,%xmm2
- vmovdqu 80-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm6,%xmm6
- vpsrld $6,%xmm13,%xmm7
- vpslld $26,%xmm13,%xmm2
- vmovdqu %xmm6,112-128(%rax)
- vpaddd %xmm8,%xmm6,%xmm6
-
- vpsrld $11,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm13,%xmm2
- vpaddd 96(%rbp),%xmm6,%xmm6
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm13,%xmm2
- vpandn %xmm15,%xmm13,%xmm0
- vpand %xmm14,%xmm13,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm9,%xmm8
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm9,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm9,%xmm10,%xmm4
-
- vpxor %xmm1,%xmm8,%xmm8
- vpaddd %xmm7,%xmm6,%xmm6
-
- vpsrld $13,%xmm9,%xmm1
-
- vpslld $19,%xmm9,%xmm2
- vpaddd %xmm0,%xmm6,%xmm6
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm8,%xmm7
-
- vpsrld $22,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm9,%xmm2
- vpxor %xmm3,%xmm10,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm6,%xmm8,%xmm8
- vpaddd %xmm7,%xmm8,%xmm8
- addq $256,%rbp
- vmovdqu 144-128(%rax),%xmm6
- vpaddd 16-128(%rax),%xmm5,%xmm5
-
- vpsrld $3,%xmm6,%xmm7
- vpsrld $7,%xmm6,%xmm1
- vpslld $25,%xmm6,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm6,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm6,%xmm2
- vmovdqu 96-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm1,%xmm3,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm5,%xmm5
- vpsrld $6,%xmm12,%xmm7
- vpslld $26,%xmm12,%xmm2
- vmovdqu %xmm5,128-128(%rax)
- vpaddd %xmm15,%xmm5,%xmm5
-
- vpsrld $11,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm12,%xmm2
- vpaddd -128(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm12,%xmm2
- vpandn %xmm14,%xmm12,%xmm0
- vpand %xmm13,%xmm12,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm8,%xmm15
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm8,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm8,%xmm9,%xmm3
-
- vpxor %xmm1,%xmm15,%xmm15
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm8,%xmm1
-
- vpslld $19,%xmm8,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm15,%xmm7
-
- vpsrld $22,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm8,%xmm2
- vpxor %xmm4,%xmm9,%xmm15
- vpaddd %xmm5,%xmm11,%xmm11
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm15,%xmm15
- vpaddd %xmm7,%xmm15,%xmm15
- vmovdqu 160-128(%rax),%xmm5
- vpaddd 32-128(%rax),%xmm6,%xmm6
-
- vpsrld $3,%xmm5,%xmm7
- vpsrld $7,%xmm5,%xmm1
- vpslld $25,%xmm5,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm5,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm5,%xmm2
- vmovdqu 112-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm6,%xmm6
- vpsrld $6,%xmm11,%xmm7
- vpslld $26,%xmm11,%xmm2
- vmovdqu %xmm6,144-128(%rax)
- vpaddd %xmm14,%xmm6,%xmm6
-
- vpsrld $11,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm11,%xmm2
- vpaddd -96(%rbp),%xmm6,%xmm6
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm11,%xmm2
- vpandn %xmm13,%xmm11,%xmm0
- vpand %xmm12,%xmm11,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm15,%xmm14
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm15,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm15,%xmm8,%xmm4
-
- vpxor %xmm1,%xmm14,%xmm14
- vpaddd %xmm7,%xmm6,%xmm6
-
- vpsrld $13,%xmm15,%xmm1
-
- vpslld $19,%xmm15,%xmm2
- vpaddd %xmm0,%xmm6,%xmm6
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm14,%xmm7
-
- vpsrld $22,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm15,%xmm2
- vpxor %xmm3,%xmm8,%xmm14
- vpaddd %xmm6,%xmm10,%xmm10
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm6,%xmm14,%xmm14
- vpaddd %xmm7,%xmm14,%xmm14
- vmovdqu 176-128(%rax),%xmm6
- vpaddd 48-128(%rax),%xmm5,%xmm5
-
- vpsrld $3,%xmm6,%xmm7
- vpsrld $7,%xmm6,%xmm1
- vpslld $25,%xmm6,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm6,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm6,%xmm2
- vmovdqu 128-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm1,%xmm3,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm5,%xmm5
- vpsrld $6,%xmm10,%xmm7
- vpslld $26,%xmm10,%xmm2
- vmovdqu %xmm5,160-128(%rax)
- vpaddd %xmm13,%xmm5,%xmm5
-
- vpsrld $11,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm10,%xmm2
- vpaddd -64(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm10,%xmm2
- vpandn %xmm12,%xmm10,%xmm0
- vpand %xmm11,%xmm10,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm14,%xmm13
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm14,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm14,%xmm15,%xmm3
-
- vpxor %xmm1,%xmm13,%xmm13
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm14,%xmm1
-
- vpslld $19,%xmm14,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm13,%xmm7
-
- vpsrld $22,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm14,%xmm2
- vpxor %xmm4,%xmm15,%xmm13
- vpaddd %xmm5,%xmm9,%xmm9
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm13,%xmm13
- vpaddd %xmm7,%xmm13,%xmm13
- vmovdqu 192-128(%rax),%xmm5
- vpaddd 64-128(%rax),%xmm6,%xmm6
-
- vpsrld $3,%xmm5,%xmm7
- vpsrld $7,%xmm5,%xmm1
- vpslld $25,%xmm5,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm5,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm5,%xmm2
- vmovdqu 144-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm6,%xmm6
- vpsrld $6,%xmm9,%xmm7
- vpslld $26,%xmm9,%xmm2
- vmovdqu %xmm6,176-128(%rax)
- vpaddd %xmm12,%xmm6,%xmm6
-
- vpsrld $11,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm9,%xmm2
- vpaddd -32(%rbp),%xmm6,%xmm6
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm9,%xmm2
- vpandn %xmm11,%xmm9,%xmm0
- vpand %xmm10,%xmm9,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm13,%xmm12
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm13,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm13,%xmm14,%xmm4
-
- vpxor %xmm1,%xmm12,%xmm12
- vpaddd %xmm7,%xmm6,%xmm6
-
- vpsrld $13,%xmm13,%xmm1
-
- vpslld $19,%xmm13,%xmm2
- vpaddd %xmm0,%xmm6,%xmm6
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm12,%xmm7
-
- vpsrld $22,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm13,%xmm2
- vpxor %xmm3,%xmm14,%xmm12
- vpaddd %xmm6,%xmm8,%xmm8
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm6,%xmm12,%xmm12
- vpaddd %xmm7,%xmm12,%xmm12
- vmovdqu 208-128(%rax),%xmm6
- vpaddd 80-128(%rax),%xmm5,%xmm5
-
- vpsrld $3,%xmm6,%xmm7
- vpsrld $7,%xmm6,%xmm1
- vpslld $25,%xmm6,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm6,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm6,%xmm2
- vmovdqu 160-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm1,%xmm3,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm5,%xmm5
- vpsrld $6,%xmm8,%xmm7
- vpslld $26,%xmm8,%xmm2
- vmovdqu %xmm5,192-128(%rax)
- vpaddd %xmm11,%xmm5,%xmm5
-
- vpsrld $11,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm8,%xmm2
- vpaddd 0(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm8,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm8,%xmm2
- vpandn %xmm10,%xmm8,%xmm0
- vpand %xmm9,%xmm8,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm12,%xmm11
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm12,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm12,%xmm13,%xmm3
-
- vpxor %xmm1,%xmm11,%xmm11
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm12,%xmm1
-
- vpslld $19,%xmm12,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm11,%xmm7
-
- vpsrld $22,%xmm12,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm12,%xmm2
- vpxor %xmm4,%xmm13,%xmm11
- vpaddd %xmm5,%xmm15,%xmm15
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm11,%xmm11
- vpaddd %xmm7,%xmm11,%xmm11
- vmovdqu 224-128(%rax),%xmm5
- vpaddd 96-128(%rax),%xmm6,%xmm6
-
- vpsrld $3,%xmm5,%xmm7
- vpsrld $7,%xmm5,%xmm1
- vpslld $25,%xmm5,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm5,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm5,%xmm2
- vmovdqu 176-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm6,%xmm6
- vpsrld $6,%xmm15,%xmm7
- vpslld $26,%xmm15,%xmm2
- vmovdqu %xmm6,208-128(%rax)
- vpaddd %xmm10,%xmm6,%xmm6
-
- vpsrld $11,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm15,%xmm2
- vpaddd 32(%rbp),%xmm6,%xmm6
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm15,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm15,%xmm2
- vpandn %xmm9,%xmm15,%xmm0
- vpand %xmm8,%xmm15,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm11,%xmm10
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm11,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm11,%xmm12,%xmm4
-
- vpxor %xmm1,%xmm10,%xmm10
- vpaddd %xmm7,%xmm6,%xmm6
-
- vpsrld $13,%xmm11,%xmm1
-
- vpslld $19,%xmm11,%xmm2
- vpaddd %xmm0,%xmm6,%xmm6
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm10,%xmm7
-
- vpsrld $22,%xmm11,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm11,%xmm2
- vpxor %xmm3,%xmm12,%xmm10
- vpaddd %xmm6,%xmm14,%xmm14
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm6,%xmm10,%xmm10
- vpaddd %xmm7,%xmm10,%xmm10
- vmovdqu 240-128(%rax),%xmm6
- vpaddd 112-128(%rax),%xmm5,%xmm5
-
- vpsrld $3,%xmm6,%xmm7
- vpsrld $7,%xmm6,%xmm1
- vpslld $25,%xmm6,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm6,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm6,%xmm2
- vmovdqu 192-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm1,%xmm3,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm5,%xmm5
- vpsrld $6,%xmm14,%xmm7
- vpslld $26,%xmm14,%xmm2
- vmovdqu %xmm5,224-128(%rax)
- vpaddd %xmm9,%xmm5,%xmm5
-
- vpsrld $11,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm14,%xmm2
- vpaddd 64(%rbp),%xmm5,%xmm5
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm14,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm14,%xmm2
- vpandn %xmm8,%xmm14,%xmm0
- vpand %xmm15,%xmm14,%xmm3
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm10,%xmm9
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm10,%xmm1
- vpxor %xmm3,%xmm0,%xmm0
- vpxor %xmm10,%xmm11,%xmm3
-
- vpxor %xmm1,%xmm9,%xmm9
- vpaddd %xmm7,%xmm5,%xmm5
-
- vpsrld $13,%xmm10,%xmm1
-
- vpslld $19,%xmm10,%xmm2
- vpaddd %xmm0,%xmm5,%xmm5
- vpand %xmm3,%xmm4,%xmm4
-
- vpxor %xmm1,%xmm9,%xmm7
-
- vpsrld $22,%xmm10,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm10,%xmm2
- vpxor %xmm4,%xmm11,%xmm9
- vpaddd %xmm5,%xmm13,%xmm13
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm5,%xmm9,%xmm9
- vpaddd %xmm7,%xmm9,%xmm9
- vmovdqu 0-128(%rax),%xmm5
- vpaddd 128-128(%rax),%xmm6,%xmm6
-
- vpsrld $3,%xmm5,%xmm7
- vpsrld $7,%xmm5,%xmm1
- vpslld $25,%xmm5,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $18,%xmm5,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $14,%xmm5,%xmm2
- vmovdqu 208-128(%rax),%xmm0
- vpsrld $10,%xmm0,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
- vpsrld $17,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $15,%xmm0,%xmm2
- vpaddd %xmm7,%xmm6,%xmm6
- vpxor %xmm1,%xmm4,%xmm7
- vpsrld $19,%xmm0,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $13,%xmm0,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
- vpaddd %xmm7,%xmm6,%xmm6
- vpsrld $6,%xmm13,%xmm7
- vpslld $26,%xmm13,%xmm2
- vmovdqu %xmm6,240-128(%rax)
- vpaddd %xmm8,%xmm6,%xmm6
-
- vpsrld $11,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
- vpslld $21,%xmm13,%xmm2
- vpaddd 96(%rbp),%xmm6,%xmm6
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $25,%xmm13,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $7,%xmm13,%xmm2
- vpandn %xmm15,%xmm13,%xmm0
- vpand %xmm14,%xmm13,%xmm4
-
- vpxor %xmm1,%xmm7,%xmm7
-
- vpsrld $2,%xmm9,%xmm8
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $30,%xmm9,%xmm1
- vpxor %xmm4,%xmm0,%xmm0
- vpxor %xmm9,%xmm10,%xmm4
-
- vpxor %xmm1,%xmm8,%xmm8
- vpaddd %xmm7,%xmm6,%xmm6
-
- vpsrld $13,%xmm9,%xmm1
-
- vpslld $19,%xmm9,%xmm2
- vpaddd %xmm0,%xmm6,%xmm6
- vpand %xmm4,%xmm3,%xmm3
-
- vpxor %xmm1,%xmm8,%xmm7
-
- vpsrld $22,%xmm9,%xmm1
- vpxor %xmm2,%xmm7,%xmm7
-
- vpslld $10,%xmm9,%xmm2
- vpxor %xmm3,%xmm10,%xmm8
- vpaddd %xmm6,%xmm12,%xmm12
-
- vpxor %xmm1,%xmm7,%xmm7
- vpxor %xmm2,%xmm7,%xmm7
-
- vpaddd %xmm6,%xmm8,%xmm8
- vpaddd %xmm7,%xmm8,%xmm8
- addq $256,%rbp
- decl %ecx
- jnz .Loop_16_xx_avx
-
- movl $1,%ecx
- leaq K256+128(%rip),%rbp
- cmpl 0(%rbx),%ecx
- cmovgeq %rbp,%r8
- cmpl 4(%rbx),%ecx
- cmovgeq %rbp,%r9
- cmpl 8(%rbx),%ecx
- cmovgeq %rbp,%r10
- cmpl 12(%rbx),%ecx
- cmovgeq %rbp,%r11
- vmovdqa (%rbx),%xmm7
- vpxor %xmm0,%xmm0,%xmm0
- vmovdqa %xmm7,%xmm6
- vpcmpgtd %xmm0,%xmm6,%xmm6
- vpaddd %xmm6,%xmm7,%xmm7
-
- vmovdqu 0-128(%rdi),%xmm0
- vpand %xmm6,%xmm8,%xmm8
- vmovdqu 32-128(%rdi),%xmm1
- vpand %xmm6,%xmm9,%xmm9
- vmovdqu 64-128(%rdi),%xmm2
- vpand %xmm6,%xmm10,%xmm10
- vmovdqu 96-128(%rdi),%xmm5
- vpand %xmm6,%xmm11,%xmm11
- vpaddd %xmm0,%xmm8,%xmm8
- vmovdqu 128-128(%rdi),%xmm0
- vpand %xmm6,%xmm12,%xmm12
- vpaddd %xmm1,%xmm9,%xmm9
- vmovdqu 160-128(%rdi),%xmm1
- vpand %xmm6,%xmm13,%xmm13
- vpaddd %xmm2,%xmm10,%xmm10
- vmovdqu 192-128(%rdi),%xmm2
- vpand %xmm6,%xmm14,%xmm14
- vpaddd %xmm5,%xmm11,%xmm11
- vmovdqu 224-128(%rdi),%xmm5
- vpand %xmm6,%xmm15,%xmm15
- vpaddd %xmm0,%xmm12,%xmm12
- vpaddd %xmm1,%xmm13,%xmm13
- vmovdqu %xmm8,0-128(%rdi)
- vpaddd %xmm2,%xmm14,%xmm14
- vmovdqu %xmm9,32-128(%rdi)
- vpaddd %xmm5,%xmm15,%xmm15
- vmovdqu %xmm10,64-128(%rdi)
- vmovdqu %xmm11,96-128(%rdi)
- vmovdqu %xmm12,128-128(%rdi)
- vmovdqu %xmm13,160-128(%rdi)
- vmovdqu %xmm14,192-128(%rdi)
- vmovdqu %xmm15,224-128(%rdi)
-
- vmovdqu %xmm7,(%rbx)
- vmovdqu .Lpbswap(%rip),%xmm6
- decl %edx
- jnz .Loop_avx
-
- movl 280(%rsp),%edx
- leaq 16(%rdi),%rdi
- leaq 64(%rsi),%rsi
- decl %edx
- jnz .Loop_grande_avx
-
-.Ldone_avx:
- movq 272(%rsp),%rax
-.cfi_def_cfa %rax,8
- vzeroupper
- movq -16(%rax),%rbp
-.cfi_restore %rbp
- movq -8(%rax),%rbx
-.cfi_restore %rbx
- leaq (%rax),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha256_multi_block_avx,.-sha256_multi_block_avx
-.type sha256_multi_block_avx2,@function
-.align 32
-sha256_multi_block_avx2:
-.cfi_startproc
-_avx2_shortcut:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- subq $576,%rsp
- andq $-256,%rsp
- movq %rax,544(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0xa0,0x04,0x06,0x23,0x08
-.Lbody_avx2:
- leaq K256+128(%rip),%rbp
- leaq 128(%rdi),%rdi
-
-.Loop_grande_avx2:
- movl %edx,552(%rsp)
- xorl %edx,%edx
- leaq 512(%rsp),%rbx
- movq 0(%rsi),%r12
- movl 8(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,0(%rbx)
- cmovleq %rbp,%r12
- movq 16(%rsi),%r13
- movl 24(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,4(%rbx)
- cmovleq %rbp,%r13
- movq 32(%rsi),%r14
- movl 40(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,8(%rbx)
- cmovleq %rbp,%r14
- movq 48(%rsi),%r15
- movl 56(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,12(%rbx)
- cmovleq %rbp,%r15
- movq 64(%rsi),%r8
- movl 72(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,16(%rbx)
- cmovleq %rbp,%r8
- movq 80(%rsi),%r9
- movl 88(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,20(%rbx)
- cmovleq %rbp,%r9
- movq 96(%rsi),%r10
- movl 104(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,24(%rbx)
- cmovleq %rbp,%r10
- movq 112(%rsi),%r11
- movl 120(%rsi),%ecx
- cmpl %edx,%ecx
- cmovgl %ecx,%edx
- testl %ecx,%ecx
- movl %ecx,28(%rbx)
- cmovleq %rbp,%r11
- vmovdqu 0-128(%rdi),%ymm8
- leaq 128(%rsp),%rax
- vmovdqu 32-128(%rdi),%ymm9
- leaq 256+128(%rsp),%rbx
- vmovdqu 64-128(%rdi),%ymm10
- vmovdqu 96-128(%rdi),%ymm11
- vmovdqu 128-128(%rdi),%ymm12
- vmovdqu 160-128(%rdi),%ymm13
- vmovdqu 192-128(%rdi),%ymm14
- vmovdqu 224-128(%rdi),%ymm15
- vmovdqu .Lpbswap(%rip),%ymm6
- jmp .Loop_avx2
-
-.align 32
-.Loop_avx2:
- vpxor %ymm9,%ymm10,%ymm4
- vmovd 0(%r12),%xmm5
- vmovd 0(%r8),%xmm0
- vmovd 0(%r13),%xmm1
- vmovd 0(%r9),%xmm2
- vpinsrd $1,0(%r14),%xmm5,%xmm5
- vpinsrd $1,0(%r10),%xmm0,%xmm0
- vpinsrd $1,0(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,0(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm12,%ymm7
- vpslld $26,%ymm12,%ymm2
- vmovdqu %ymm5,0-128(%rax)
- vpaddd %ymm15,%ymm5,%ymm5
-
- vpsrld $11,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm12,%ymm2
- vpaddd -128(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm12,%ymm2
- vpandn %ymm14,%ymm12,%ymm0
- vpand %ymm13,%ymm12,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm8,%ymm15
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm8,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm8,%ymm9,%ymm3
-
- vpxor %ymm1,%ymm15,%ymm15
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm8,%ymm1
-
- vpslld $19,%ymm8,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm15,%ymm7
-
- vpsrld $22,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm8,%ymm2
- vpxor %ymm4,%ymm9,%ymm15
- vpaddd %ymm5,%ymm11,%ymm11
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm15,%ymm15
- vpaddd %ymm7,%ymm15,%ymm15
- vmovd 4(%r12),%xmm5
- vmovd 4(%r8),%xmm0
- vmovd 4(%r13),%xmm1
- vmovd 4(%r9),%xmm2
- vpinsrd $1,4(%r14),%xmm5,%xmm5
- vpinsrd $1,4(%r10),%xmm0,%xmm0
- vpinsrd $1,4(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,4(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm11,%ymm7
- vpslld $26,%ymm11,%ymm2
- vmovdqu %ymm5,32-128(%rax)
- vpaddd %ymm14,%ymm5,%ymm5
-
- vpsrld $11,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm11,%ymm2
- vpaddd -96(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm11,%ymm2
- vpandn %ymm13,%ymm11,%ymm0
- vpand %ymm12,%ymm11,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm15,%ymm14
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm15,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm15,%ymm8,%ymm4
-
- vpxor %ymm1,%ymm14,%ymm14
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm15,%ymm1
-
- vpslld $19,%ymm15,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm14,%ymm7
-
- vpsrld $22,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm15,%ymm2
- vpxor %ymm3,%ymm8,%ymm14
- vpaddd %ymm5,%ymm10,%ymm10
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm14,%ymm14
- vpaddd %ymm7,%ymm14,%ymm14
- vmovd 8(%r12),%xmm5
- vmovd 8(%r8),%xmm0
- vmovd 8(%r13),%xmm1
- vmovd 8(%r9),%xmm2
- vpinsrd $1,8(%r14),%xmm5,%xmm5
- vpinsrd $1,8(%r10),%xmm0,%xmm0
- vpinsrd $1,8(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,8(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm10,%ymm7
- vpslld $26,%ymm10,%ymm2
- vmovdqu %ymm5,64-128(%rax)
- vpaddd %ymm13,%ymm5,%ymm5
-
- vpsrld $11,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm10,%ymm2
- vpaddd -64(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm10,%ymm2
- vpandn %ymm12,%ymm10,%ymm0
- vpand %ymm11,%ymm10,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm14,%ymm13
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm14,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm14,%ymm15,%ymm3
-
- vpxor %ymm1,%ymm13,%ymm13
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm14,%ymm1
-
- vpslld $19,%ymm14,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm13,%ymm7
-
- vpsrld $22,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm14,%ymm2
- vpxor %ymm4,%ymm15,%ymm13
- vpaddd %ymm5,%ymm9,%ymm9
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm13,%ymm13
- vpaddd %ymm7,%ymm13,%ymm13
- vmovd 12(%r12),%xmm5
- vmovd 12(%r8),%xmm0
- vmovd 12(%r13),%xmm1
- vmovd 12(%r9),%xmm2
- vpinsrd $1,12(%r14),%xmm5,%xmm5
- vpinsrd $1,12(%r10),%xmm0,%xmm0
- vpinsrd $1,12(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,12(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm9,%ymm7
- vpslld $26,%ymm9,%ymm2
- vmovdqu %ymm5,96-128(%rax)
- vpaddd %ymm12,%ymm5,%ymm5
-
- vpsrld $11,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm9,%ymm2
- vpaddd -32(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm9,%ymm2
- vpandn %ymm11,%ymm9,%ymm0
- vpand %ymm10,%ymm9,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm13,%ymm12
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm13,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm13,%ymm14,%ymm4
-
- vpxor %ymm1,%ymm12,%ymm12
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm13,%ymm1
-
- vpslld $19,%ymm13,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm12,%ymm7
-
- vpsrld $22,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm13,%ymm2
- vpxor %ymm3,%ymm14,%ymm12
- vpaddd %ymm5,%ymm8,%ymm8
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm12,%ymm12
- vpaddd %ymm7,%ymm12,%ymm12
- vmovd 16(%r12),%xmm5
- vmovd 16(%r8),%xmm0
- vmovd 16(%r13),%xmm1
- vmovd 16(%r9),%xmm2
- vpinsrd $1,16(%r14),%xmm5,%xmm5
- vpinsrd $1,16(%r10),%xmm0,%xmm0
- vpinsrd $1,16(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,16(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm8,%ymm7
- vpslld $26,%ymm8,%ymm2
- vmovdqu %ymm5,128-128(%rax)
- vpaddd %ymm11,%ymm5,%ymm5
-
- vpsrld $11,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm8,%ymm2
- vpaddd 0(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm8,%ymm2
- vpandn %ymm10,%ymm8,%ymm0
- vpand %ymm9,%ymm8,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm12,%ymm11
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm12,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm12,%ymm13,%ymm3
-
- vpxor %ymm1,%ymm11,%ymm11
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm12,%ymm1
-
- vpslld $19,%ymm12,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm11,%ymm7
-
- vpsrld $22,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm12,%ymm2
- vpxor %ymm4,%ymm13,%ymm11
- vpaddd %ymm5,%ymm15,%ymm15
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm11,%ymm11
- vpaddd %ymm7,%ymm11,%ymm11
- vmovd 20(%r12),%xmm5
- vmovd 20(%r8),%xmm0
- vmovd 20(%r13),%xmm1
- vmovd 20(%r9),%xmm2
- vpinsrd $1,20(%r14),%xmm5,%xmm5
- vpinsrd $1,20(%r10),%xmm0,%xmm0
- vpinsrd $1,20(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,20(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm15,%ymm7
- vpslld $26,%ymm15,%ymm2
- vmovdqu %ymm5,160-128(%rax)
- vpaddd %ymm10,%ymm5,%ymm5
-
- vpsrld $11,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm15,%ymm2
- vpaddd 32(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm15,%ymm2
- vpandn %ymm9,%ymm15,%ymm0
- vpand %ymm8,%ymm15,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm11,%ymm10
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm11,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm11,%ymm12,%ymm4
-
- vpxor %ymm1,%ymm10,%ymm10
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm11,%ymm1
-
- vpslld $19,%ymm11,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm10,%ymm7
-
- vpsrld $22,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm11,%ymm2
- vpxor %ymm3,%ymm12,%ymm10
- vpaddd %ymm5,%ymm14,%ymm14
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm10,%ymm10
- vpaddd %ymm7,%ymm10,%ymm10
- vmovd 24(%r12),%xmm5
- vmovd 24(%r8),%xmm0
- vmovd 24(%r13),%xmm1
- vmovd 24(%r9),%xmm2
- vpinsrd $1,24(%r14),%xmm5,%xmm5
- vpinsrd $1,24(%r10),%xmm0,%xmm0
- vpinsrd $1,24(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,24(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm14,%ymm7
- vpslld $26,%ymm14,%ymm2
- vmovdqu %ymm5,192-128(%rax)
- vpaddd %ymm9,%ymm5,%ymm5
-
- vpsrld $11,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm14,%ymm2
- vpaddd 64(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm14,%ymm2
- vpandn %ymm8,%ymm14,%ymm0
- vpand %ymm15,%ymm14,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm10,%ymm9
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm10,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm10,%ymm11,%ymm3
-
- vpxor %ymm1,%ymm9,%ymm9
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm10,%ymm1
-
- vpslld $19,%ymm10,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm9,%ymm7
-
- vpsrld $22,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm10,%ymm2
- vpxor %ymm4,%ymm11,%ymm9
- vpaddd %ymm5,%ymm13,%ymm13
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm9,%ymm9
- vpaddd %ymm7,%ymm9,%ymm9
- vmovd 28(%r12),%xmm5
- vmovd 28(%r8),%xmm0
- vmovd 28(%r13),%xmm1
- vmovd 28(%r9),%xmm2
- vpinsrd $1,28(%r14),%xmm5,%xmm5
- vpinsrd $1,28(%r10),%xmm0,%xmm0
- vpinsrd $1,28(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,28(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm13,%ymm7
- vpslld $26,%ymm13,%ymm2
- vmovdqu %ymm5,224-128(%rax)
- vpaddd %ymm8,%ymm5,%ymm5
-
- vpsrld $11,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm13,%ymm2
- vpaddd 96(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm13,%ymm2
- vpandn %ymm15,%ymm13,%ymm0
- vpand %ymm14,%ymm13,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm9,%ymm8
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm9,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm9,%ymm10,%ymm4
-
- vpxor %ymm1,%ymm8,%ymm8
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm9,%ymm1
-
- vpslld $19,%ymm9,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm8,%ymm7
-
- vpsrld $22,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm9,%ymm2
- vpxor %ymm3,%ymm10,%ymm8
- vpaddd %ymm5,%ymm12,%ymm12
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm8,%ymm8
- vpaddd %ymm7,%ymm8,%ymm8
- addq $256,%rbp
- vmovd 32(%r12),%xmm5
- vmovd 32(%r8),%xmm0
- vmovd 32(%r13),%xmm1
- vmovd 32(%r9),%xmm2
- vpinsrd $1,32(%r14),%xmm5,%xmm5
- vpinsrd $1,32(%r10),%xmm0,%xmm0
- vpinsrd $1,32(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,32(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm12,%ymm7
- vpslld $26,%ymm12,%ymm2
- vmovdqu %ymm5,256-256-128(%rbx)
- vpaddd %ymm15,%ymm5,%ymm5
-
- vpsrld $11,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm12,%ymm2
- vpaddd -128(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm12,%ymm2
- vpandn %ymm14,%ymm12,%ymm0
- vpand %ymm13,%ymm12,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm8,%ymm15
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm8,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm8,%ymm9,%ymm3
-
- vpxor %ymm1,%ymm15,%ymm15
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm8,%ymm1
-
- vpslld $19,%ymm8,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm15,%ymm7
-
- vpsrld $22,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm8,%ymm2
- vpxor %ymm4,%ymm9,%ymm15
- vpaddd %ymm5,%ymm11,%ymm11
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm15,%ymm15
- vpaddd %ymm7,%ymm15,%ymm15
- vmovd 36(%r12),%xmm5
- vmovd 36(%r8),%xmm0
- vmovd 36(%r13),%xmm1
- vmovd 36(%r9),%xmm2
- vpinsrd $1,36(%r14),%xmm5,%xmm5
- vpinsrd $1,36(%r10),%xmm0,%xmm0
- vpinsrd $1,36(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,36(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm11,%ymm7
- vpslld $26,%ymm11,%ymm2
- vmovdqu %ymm5,288-256-128(%rbx)
- vpaddd %ymm14,%ymm5,%ymm5
-
- vpsrld $11,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm11,%ymm2
- vpaddd -96(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm11,%ymm2
- vpandn %ymm13,%ymm11,%ymm0
- vpand %ymm12,%ymm11,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm15,%ymm14
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm15,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm15,%ymm8,%ymm4
-
- vpxor %ymm1,%ymm14,%ymm14
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm15,%ymm1
-
- vpslld $19,%ymm15,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm14,%ymm7
-
- vpsrld $22,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm15,%ymm2
- vpxor %ymm3,%ymm8,%ymm14
- vpaddd %ymm5,%ymm10,%ymm10
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm14,%ymm14
- vpaddd %ymm7,%ymm14,%ymm14
- vmovd 40(%r12),%xmm5
- vmovd 40(%r8),%xmm0
- vmovd 40(%r13),%xmm1
- vmovd 40(%r9),%xmm2
- vpinsrd $1,40(%r14),%xmm5,%xmm5
- vpinsrd $1,40(%r10),%xmm0,%xmm0
- vpinsrd $1,40(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,40(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm10,%ymm7
- vpslld $26,%ymm10,%ymm2
- vmovdqu %ymm5,320-256-128(%rbx)
- vpaddd %ymm13,%ymm5,%ymm5
-
- vpsrld $11,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm10,%ymm2
- vpaddd -64(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm10,%ymm2
- vpandn %ymm12,%ymm10,%ymm0
- vpand %ymm11,%ymm10,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm14,%ymm13
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm14,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm14,%ymm15,%ymm3
-
- vpxor %ymm1,%ymm13,%ymm13
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm14,%ymm1
-
- vpslld $19,%ymm14,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm13,%ymm7
-
- vpsrld $22,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm14,%ymm2
- vpxor %ymm4,%ymm15,%ymm13
- vpaddd %ymm5,%ymm9,%ymm9
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm13,%ymm13
- vpaddd %ymm7,%ymm13,%ymm13
- vmovd 44(%r12),%xmm5
- vmovd 44(%r8),%xmm0
- vmovd 44(%r13),%xmm1
- vmovd 44(%r9),%xmm2
- vpinsrd $1,44(%r14),%xmm5,%xmm5
- vpinsrd $1,44(%r10),%xmm0,%xmm0
- vpinsrd $1,44(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,44(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm9,%ymm7
- vpslld $26,%ymm9,%ymm2
- vmovdqu %ymm5,352-256-128(%rbx)
- vpaddd %ymm12,%ymm5,%ymm5
-
- vpsrld $11,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm9,%ymm2
- vpaddd -32(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm9,%ymm2
- vpandn %ymm11,%ymm9,%ymm0
- vpand %ymm10,%ymm9,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm13,%ymm12
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm13,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm13,%ymm14,%ymm4
-
- vpxor %ymm1,%ymm12,%ymm12
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm13,%ymm1
-
- vpslld $19,%ymm13,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm12,%ymm7
-
- vpsrld $22,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm13,%ymm2
- vpxor %ymm3,%ymm14,%ymm12
- vpaddd %ymm5,%ymm8,%ymm8
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm12,%ymm12
- vpaddd %ymm7,%ymm12,%ymm12
- vmovd 48(%r12),%xmm5
- vmovd 48(%r8),%xmm0
- vmovd 48(%r13),%xmm1
- vmovd 48(%r9),%xmm2
- vpinsrd $1,48(%r14),%xmm5,%xmm5
- vpinsrd $1,48(%r10),%xmm0,%xmm0
- vpinsrd $1,48(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,48(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm8,%ymm7
- vpslld $26,%ymm8,%ymm2
- vmovdqu %ymm5,384-256-128(%rbx)
- vpaddd %ymm11,%ymm5,%ymm5
-
- vpsrld $11,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm8,%ymm2
- vpaddd 0(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm8,%ymm2
- vpandn %ymm10,%ymm8,%ymm0
- vpand %ymm9,%ymm8,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm12,%ymm11
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm12,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm12,%ymm13,%ymm3
-
- vpxor %ymm1,%ymm11,%ymm11
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm12,%ymm1
-
- vpslld $19,%ymm12,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm11,%ymm7
-
- vpsrld $22,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm12,%ymm2
- vpxor %ymm4,%ymm13,%ymm11
- vpaddd %ymm5,%ymm15,%ymm15
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm11,%ymm11
- vpaddd %ymm7,%ymm11,%ymm11
- vmovd 52(%r12),%xmm5
- vmovd 52(%r8),%xmm0
- vmovd 52(%r13),%xmm1
- vmovd 52(%r9),%xmm2
- vpinsrd $1,52(%r14),%xmm5,%xmm5
- vpinsrd $1,52(%r10),%xmm0,%xmm0
- vpinsrd $1,52(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,52(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm15,%ymm7
- vpslld $26,%ymm15,%ymm2
- vmovdqu %ymm5,416-256-128(%rbx)
- vpaddd %ymm10,%ymm5,%ymm5
-
- vpsrld $11,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm15,%ymm2
- vpaddd 32(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm15,%ymm2
- vpandn %ymm9,%ymm15,%ymm0
- vpand %ymm8,%ymm15,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm11,%ymm10
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm11,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm11,%ymm12,%ymm4
-
- vpxor %ymm1,%ymm10,%ymm10
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm11,%ymm1
-
- vpslld $19,%ymm11,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm10,%ymm7
-
- vpsrld $22,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm11,%ymm2
- vpxor %ymm3,%ymm12,%ymm10
- vpaddd %ymm5,%ymm14,%ymm14
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm10,%ymm10
- vpaddd %ymm7,%ymm10,%ymm10
- vmovd 56(%r12),%xmm5
- vmovd 56(%r8),%xmm0
- vmovd 56(%r13),%xmm1
- vmovd 56(%r9),%xmm2
- vpinsrd $1,56(%r14),%xmm5,%xmm5
- vpinsrd $1,56(%r10),%xmm0,%xmm0
- vpinsrd $1,56(%r15),%xmm1,%xmm1
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,56(%r11),%xmm2,%xmm2
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm14,%ymm7
- vpslld $26,%ymm14,%ymm2
- vmovdqu %ymm5,448-256-128(%rbx)
- vpaddd %ymm9,%ymm5,%ymm5
-
- vpsrld $11,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm14,%ymm2
- vpaddd 64(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm14,%ymm2
- vpandn %ymm8,%ymm14,%ymm0
- vpand %ymm15,%ymm14,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm10,%ymm9
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm10,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm10,%ymm11,%ymm3
-
- vpxor %ymm1,%ymm9,%ymm9
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm10,%ymm1
-
- vpslld $19,%ymm10,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm9,%ymm7
-
- vpsrld $22,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm10,%ymm2
- vpxor %ymm4,%ymm11,%ymm9
- vpaddd %ymm5,%ymm13,%ymm13
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm9,%ymm9
- vpaddd %ymm7,%ymm9,%ymm9
- vmovd 60(%r12),%xmm5
- leaq 64(%r12),%r12
- vmovd 60(%r8),%xmm0
- leaq 64(%r8),%r8
- vmovd 60(%r13),%xmm1
- leaq 64(%r13),%r13
- vmovd 60(%r9),%xmm2
- leaq 64(%r9),%r9
- vpinsrd $1,60(%r14),%xmm5,%xmm5
- leaq 64(%r14),%r14
- vpinsrd $1,60(%r10),%xmm0,%xmm0
- leaq 64(%r10),%r10
- vpinsrd $1,60(%r15),%xmm1,%xmm1
- leaq 64(%r15),%r15
- vpunpckldq %ymm1,%ymm5,%ymm5
- vpinsrd $1,60(%r11),%xmm2,%xmm2
- leaq 64(%r11),%r11
- vpunpckldq %ymm2,%ymm0,%ymm0
- vinserti128 $1,%xmm0,%ymm5,%ymm5
- vpshufb %ymm6,%ymm5,%ymm5
- vpsrld $6,%ymm13,%ymm7
- vpslld $26,%ymm13,%ymm2
- vmovdqu %ymm5,480-256-128(%rbx)
- vpaddd %ymm8,%ymm5,%ymm5
-
- vpsrld $11,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm13,%ymm2
- vpaddd 96(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- prefetcht0 63(%r12)
- vpslld $7,%ymm13,%ymm2
- vpandn %ymm15,%ymm13,%ymm0
- vpand %ymm14,%ymm13,%ymm4
- prefetcht0 63(%r13)
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm9,%ymm8
- vpxor %ymm2,%ymm7,%ymm7
- prefetcht0 63(%r14)
- vpslld $30,%ymm9,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm9,%ymm10,%ymm4
- prefetcht0 63(%r15)
- vpxor %ymm1,%ymm8,%ymm8
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm9,%ymm1
- prefetcht0 63(%r8)
- vpslld $19,%ymm9,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm4,%ymm3,%ymm3
- prefetcht0 63(%r9)
- vpxor %ymm1,%ymm8,%ymm7
-
- vpsrld $22,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- prefetcht0 63(%r10)
- vpslld $10,%ymm9,%ymm2
- vpxor %ymm3,%ymm10,%ymm8
- vpaddd %ymm5,%ymm12,%ymm12
- prefetcht0 63(%r11)
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm8,%ymm8
- vpaddd %ymm7,%ymm8,%ymm8
- addq $256,%rbp
- vmovdqu 0-128(%rax),%ymm5
- movl $3,%ecx
- jmp .Loop_16_xx_avx2
-.align 32
-.Loop_16_xx_avx2:
- vmovdqu 32-128(%rax),%ymm6
- vpaddd 288-256-128(%rbx),%ymm5,%ymm5
-
- vpsrld $3,%ymm6,%ymm7
- vpsrld $7,%ymm6,%ymm1
- vpslld $25,%ymm6,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm6,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm6,%ymm2
- vmovdqu 448-256-128(%rbx),%ymm0
- vpsrld $10,%ymm0,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm5,%ymm5
- vpxor %ymm1,%ymm3,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm5,%ymm5
- vpsrld $6,%ymm12,%ymm7
- vpslld $26,%ymm12,%ymm2
- vmovdqu %ymm5,0-128(%rax)
- vpaddd %ymm15,%ymm5,%ymm5
-
- vpsrld $11,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm12,%ymm2
- vpaddd -128(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm12,%ymm2
- vpandn %ymm14,%ymm12,%ymm0
- vpand %ymm13,%ymm12,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm8,%ymm15
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm8,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm8,%ymm9,%ymm3
-
- vpxor %ymm1,%ymm15,%ymm15
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm8,%ymm1
-
- vpslld $19,%ymm8,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm15,%ymm7
-
- vpsrld $22,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm8,%ymm2
- vpxor %ymm4,%ymm9,%ymm15
- vpaddd %ymm5,%ymm11,%ymm11
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm15,%ymm15
- vpaddd %ymm7,%ymm15,%ymm15
- vmovdqu 64-128(%rax),%ymm5
- vpaddd 320-256-128(%rbx),%ymm6,%ymm6
-
- vpsrld $3,%ymm5,%ymm7
- vpsrld $7,%ymm5,%ymm1
- vpslld $25,%ymm5,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm5,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm5,%ymm2
- vmovdqu 480-256-128(%rbx),%ymm0
- vpsrld $10,%ymm0,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm6,%ymm6
- vpxor %ymm1,%ymm4,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm6,%ymm6
- vpsrld $6,%ymm11,%ymm7
- vpslld $26,%ymm11,%ymm2
- vmovdqu %ymm6,32-128(%rax)
- vpaddd %ymm14,%ymm6,%ymm6
-
- vpsrld $11,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm11,%ymm2
- vpaddd -96(%rbp),%ymm6,%ymm6
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm11,%ymm2
- vpandn %ymm13,%ymm11,%ymm0
- vpand %ymm12,%ymm11,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm15,%ymm14
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm15,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm15,%ymm8,%ymm4
-
- vpxor %ymm1,%ymm14,%ymm14
- vpaddd %ymm7,%ymm6,%ymm6
-
- vpsrld $13,%ymm15,%ymm1
-
- vpslld $19,%ymm15,%ymm2
- vpaddd %ymm0,%ymm6,%ymm6
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm14,%ymm7
-
- vpsrld $22,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm15,%ymm2
- vpxor %ymm3,%ymm8,%ymm14
- vpaddd %ymm6,%ymm10,%ymm10
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm6,%ymm14,%ymm14
- vpaddd %ymm7,%ymm14,%ymm14
- vmovdqu 96-128(%rax),%ymm6
- vpaddd 352-256-128(%rbx),%ymm5,%ymm5
-
- vpsrld $3,%ymm6,%ymm7
- vpsrld $7,%ymm6,%ymm1
- vpslld $25,%ymm6,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm6,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm6,%ymm2
- vmovdqu 0-128(%rax),%ymm0
- vpsrld $10,%ymm0,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm5,%ymm5
- vpxor %ymm1,%ymm3,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm5,%ymm5
- vpsrld $6,%ymm10,%ymm7
- vpslld $26,%ymm10,%ymm2
- vmovdqu %ymm5,64-128(%rax)
- vpaddd %ymm13,%ymm5,%ymm5
-
- vpsrld $11,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm10,%ymm2
- vpaddd -64(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm10,%ymm2
- vpandn %ymm12,%ymm10,%ymm0
- vpand %ymm11,%ymm10,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm14,%ymm13
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm14,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm14,%ymm15,%ymm3
-
- vpxor %ymm1,%ymm13,%ymm13
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm14,%ymm1
-
- vpslld $19,%ymm14,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm13,%ymm7
-
- vpsrld $22,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm14,%ymm2
- vpxor %ymm4,%ymm15,%ymm13
- vpaddd %ymm5,%ymm9,%ymm9
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm13,%ymm13
- vpaddd %ymm7,%ymm13,%ymm13
- vmovdqu 128-128(%rax),%ymm5
- vpaddd 384-256-128(%rbx),%ymm6,%ymm6
-
- vpsrld $3,%ymm5,%ymm7
- vpsrld $7,%ymm5,%ymm1
- vpslld $25,%ymm5,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm5,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm5,%ymm2
- vmovdqu 32-128(%rax),%ymm0
- vpsrld $10,%ymm0,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm6,%ymm6
- vpxor %ymm1,%ymm4,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm6,%ymm6
- vpsrld $6,%ymm9,%ymm7
- vpslld $26,%ymm9,%ymm2
- vmovdqu %ymm6,96-128(%rax)
- vpaddd %ymm12,%ymm6,%ymm6
-
- vpsrld $11,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm9,%ymm2
- vpaddd -32(%rbp),%ymm6,%ymm6
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm9,%ymm2
- vpandn %ymm11,%ymm9,%ymm0
- vpand %ymm10,%ymm9,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm13,%ymm12
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm13,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm13,%ymm14,%ymm4
-
- vpxor %ymm1,%ymm12,%ymm12
- vpaddd %ymm7,%ymm6,%ymm6
-
- vpsrld $13,%ymm13,%ymm1
-
- vpslld $19,%ymm13,%ymm2
- vpaddd %ymm0,%ymm6,%ymm6
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm12,%ymm7
-
- vpsrld $22,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm13,%ymm2
- vpxor %ymm3,%ymm14,%ymm12
- vpaddd %ymm6,%ymm8,%ymm8
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm6,%ymm12,%ymm12
- vpaddd %ymm7,%ymm12,%ymm12
- vmovdqu 160-128(%rax),%ymm6
- vpaddd 416-256-128(%rbx),%ymm5,%ymm5
-
- vpsrld $3,%ymm6,%ymm7
- vpsrld $7,%ymm6,%ymm1
- vpslld $25,%ymm6,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm6,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm6,%ymm2
- vmovdqu 64-128(%rax),%ymm0
- vpsrld $10,%ymm0,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm5,%ymm5
- vpxor %ymm1,%ymm3,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm5,%ymm5
- vpsrld $6,%ymm8,%ymm7
- vpslld $26,%ymm8,%ymm2
- vmovdqu %ymm5,128-128(%rax)
- vpaddd %ymm11,%ymm5,%ymm5
-
- vpsrld $11,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm8,%ymm2
- vpaddd 0(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm8,%ymm2
- vpandn %ymm10,%ymm8,%ymm0
- vpand %ymm9,%ymm8,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm12,%ymm11
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm12,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm12,%ymm13,%ymm3
-
- vpxor %ymm1,%ymm11,%ymm11
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm12,%ymm1
-
- vpslld $19,%ymm12,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm11,%ymm7
-
- vpsrld $22,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm12,%ymm2
- vpxor %ymm4,%ymm13,%ymm11
- vpaddd %ymm5,%ymm15,%ymm15
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm11,%ymm11
- vpaddd %ymm7,%ymm11,%ymm11
- vmovdqu 192-128(%rax),%ymm5
- vpaddd 448-256-128(%rbx),%ymm6,%ymm6
-
- vpsrld $3,%ymm5,%ymm7
- vpsrld $7,%ymm5,%ymm1
- vpslld $25,%ymm5,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm5,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm5,%ymm2
- vmovdqu 96-128(%rax),%ymm0
- vpsrld $10,%ymm0,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm6,%ymm6
- vpxor %ymm1,%ymm4,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm6,%ymm6
- vpsrld $6,%ymm15,%ymm7
- vpslld $26,%ymm15,%ymm2
- vmovdqu %ymm6,160-128(%rax)
- vpaddd %ymm10,%ymm6,%ymm6
-
- vpsrld $11,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm15,%ymm2
- vpaddd 32(%rbp),%ymm6,%ymm6
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm15,%ymm2
- vpandn %ymm9,%ymm15,%ymm0
- vpand %ymm8,%ymm15,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm11,%ymm10
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm11,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm11,%ymm12,%ymm4
-
- vpxor %ymm1,%ymm10,%ymm10
- vpaddd %ymm7,%ymm6,%ymm6
-
- vpsrld $13,%ymm11,%ymm1
-
- vpslld $19,%ymm11,%ymm2
- vpaddd %ymm0,%ymm6,%ymm6
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm10,%ymm7
-
- vpsrld $22,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm11,%ymm2
- vpxor %ymm3,%ymm12,%ymm10
- vpaddd %ymm6,%ymm14,%ymm14
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm6,%ymm10,%ymm10
- vpaddd %ymm7,%ymm10,%ymm10
- vmovdqu 224-128(%rax),%ymm6
- vpaddd 480-256-128(%rbx),%ymm5,%ymm5
-
- vpsrld $3,%ymm6,%ymm7
- vpsrld $7,%ymm6,%ymm1
- vpslld $25,%ymm6,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm6,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm6,%ymm2
- vmovdqu 128-128(%rax),%ymm0
- vpsrld $10,%ymm0,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm5,%ymm5
- vpxor %ymm1,%ymm3,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm5,%ymm5
- vpsrld $6,%ymm14,%ymm7
- vpslld $26,%ymm14,%ymm2
- vmovdqu %ymm5,192-128(%rax)
- vpaddd %ymm9,%ymm5,%ymm5
-
- vpsrld $11,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm14,%ymm2
- vpaddd 64(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm14,%ymm2
- vpandn %ymm8,%ymm14,%ymm0
- vpand %ymm15,%ymm14,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm10,%ymm9
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm10,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm10,%ymm11,%ymm3
-
- vpxor %ymm1,%ymm9,%ymm9
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm10,%ymm1
-
- vpslld $19,%ymm10,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm9,%ymm7
-
- vpsrld $22,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm10,%ymm2
- vpxor %ymm4,%ymm11,%ymm9
- vpaddd %ymm5,%ymm13,%ymm13
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm9,%ymm9
- vpaddd %ymm7,%ymm9,%ymm9
- vmovdqu 256-256-128(%rbx),%ymm5
- vpaddd 0-128(%rax),%ymm6,%ymm6
-
- vpsrld $3,%ymm5,%ymm7
- vpsrld $7,%ymm5,%ymm1
- vpslld $25,%ymm5,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm5,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm5,%ymm2
- vmovdqu 160-128(%rax),%ymm0
- vpsrld $10,%ymm0,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm6,%ymm6
- vpxor %ymm1,%ymm4,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm6,%ymm6
- vpsrld $6,%ymm13,%ymm7
- vpslld $26,%ymm13,%ymm2
- vmovdqu %ymm6,224-128(%rax)
- vpaddd %ymm8,%ymm6,%ymm6
-
- vpsrld $11,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm13,%ymm2
- vpaddd 96(%rbp),%ymm6,%ymm6
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm13,%ymm2
- vpandn %ymm15,%ymm13,%ymm0
- vpand %ymm14,%ymm13,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm9,%ymm8
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm9,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm9,%ymm10,%ymm4
-
- vpxor %ymm1,%ymm8,%ymm8
- vpaddd %ymm7,%ymm6,%ymm6
-
- vpsrld $13,%ymm9,%ymm1
-
- vpslld $19,%ymm9,%ymm2
- vpaddd %ymm0,%ymm6,%ymm6
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm8,%ymm7
-
- vpsrld $22,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm9,%ymm2
- vpxor %ymm3,%ymm10,%ymm8
- vpaddd %ymm6,%ymm12,%ymm12
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm6,%ymm8,%ymm8
- vpaddd %ymm7,%ymm8,%ymm8
- addq $256,%rbp
- vmovdqu 288-256-128(%rbx),%ymm6
- vpaddd 32-128(%rax),%ymm5,%ymm5
-
- vpsrld $3,%ymm6,%ymm7
- vpsrld $7,%ymm6,%ymm1
- vpslld $25,%ymm6,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm6,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm6,%ymm2
- vmovdqu 192-128(%rax),%ymm0
- vpsrld $10,%ymm0,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm5,%ymm5
- vpxor %ymm1,%ymm3,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm5,%ymm5
- vpsrld $6,%ymm12,%ymm7
- vpslld $26,%ymm12,%ymm2
- vmovdqu %ymm5,256-256-128(%rbx)
- vpaddd %ymm15,%ymm5,%ymm5
-
- vpsrld $11,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm12,%ymm2
- vpaddd -128(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm12,%ymm2
- vpandn %ymm14,%ymm12,%ymm0
- vpand %ymm13,%ymm12,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm8,%ymm15
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm8,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm8,%ymm9,%ymm3
-
- vpxor %ymm1,%ymm15,%ymm15
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm8,%ymm1
-
- vpslld $19,%ymm8,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm15,%ymm7
-
- vpsrld $22,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm8,%ymm2
- vpxor %ymm4,%ymm9,%ymm15
- vpaddd %ymm5,%ymm11,%ymm11
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm15,%ymm15
- vpaddd %ymm7,%ymm15,%ymm15
- vmovdqu 320-256-128(%rbx),%ymm5
- vpaddd 64-128(%rax),%ymm6,%ymm6
-
- vpsrld $3,%ymm5,%ymm7
- vpsrld $7,%ymm5,%ymm1
- vpslld $25,%ymm5,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm5,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm5,%ymm2
- vmovdqu 224-128(%rax),%ymm0
- vpsrld $10,%ymm0,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm6,%ymm6
- vpxor %ymm1,%ymm4,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm6,%ymm6
- vpsrld $6,%ymm11,%ymm7
- vpslld $26,%ymm11,%ymm2
- vmovdqu %ymm6,288-256-128(%rbx)
- vpaddd %ymm14,%ymm6,%ymm6
-
- vpsrld $11,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm11,%ymm2
- vpaddd -96(%rbp),%ymm6,%ymm6
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm11,%ymm2
- vpandn %ymm13,%ymm11,%ymm0
- vpand %ymm12,%ymm11,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm15,%ymm14
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm15,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm15,%ymm8,%ymm4
-
- vpxor %ymm1,%ymm14,%ymm14
- vpaddd %ymm7,%ymm6,%ymm6
-
- vpsrld $13,%ymm15,%ymm1
-
- vpslld $19,%ymm15,%ymm2
- vpaddd %ymm0,%ymm6,%ymm6
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm14,%ymm7
-
- vpsrld $22,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm15,%ymm2
- vpxor %ymm3,%ymm8,%ymm14
- vpaddd %ymm6,%ymm10,%ymm10
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm6,%ymm14,%ymm14
- vpaddd %ymm7,%ymm14,%ymm14
- vmovdqu 352-256-128(%rbx),%ymm6
- vpaddd 96-128(%rax),%ymm5,%ymm5
-
- vpsrld $3,%ymm6,%ymm7
- vpsrld $7,%ymm6,%ymm1
- vpslld $25,%ymm6,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm6,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm6,%ymm2
- vmovdqu 256-256-128(%rbx),%ymm0
- vpsrld $10,%ymm0,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm5,%ymm5
- vpxor %ymm1,%ymm3,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm5,%ymm5
- vpsrld $6,%ymm10,%ymm7
- vpslld $26,%ymm10,%ymm2
- vmovdqu %ymm5,320-256-128(%rbx)
- vpaddd %ymm13,%ymm5,%ymm5
-
- vpsrld $11,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm10,%ymm2
- vpaddd -64(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm10,%ymm2
- vpandn %ymm12,%ymm10,%ymm0
- vpand %ymm11,%ymm10,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm14,%ymm13
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm14,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm14,%ymm15,%ymm3
-
- vpxor %ymm1,%ymm13,%ymm13
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm14,%ymm1
-
- vpslld $19,%ymm14,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm13,%ymm7
-
- vpsrld $22,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm14,%ymm2
- vpxor %ymm4,%ymm15,%ymm13
- vpaddd %ymm5,%ymm9,%ymm9
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm13,%ymm13
- vpaddd %ymm7,%ymm13,%ymm13
- vmovdqu 384-256-128(%rbx),%ymm5
- vpaddd 128-128(%rax),%ymm6,%ymm6
-
- vpsrld $3,%ymm5,%ymm7
- vpsrld $7,%ymm5,%ymm1
- vpslld $25,%ymm5,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm5,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm5,%ymm2
- vmovdqu 288-256-128(%rbx),%ymm0
- vpsrld $10,%ymm0,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm6,%ymm6
- vpxor %ymm1,%ymm4,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm6,%ymm6
- vpsrld $6,%ymm9,%ymm7
- vpslld $26,%ymm9,%ymm2
- vmovdqu %ymm6,352-256-128(%rbx)
- vpaddd %ymm12,%ymm6,%ymm6
-
- vpsrld $11,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm9,%ymm2
- vpaddd -32(%rbp),%ymm6,%ymm6
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm9,%ymm2
- vpandn %ymm11,%ymm9,%ymm0
- vpand %ymm10,%ymm9,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm13,%ymm12
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm13,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm13,%ymm14,%ymm4
-
- vpxor %ymm1,%ymm12,%ymm12
- vpaddd %ymm7,%ymm6,%ymm6
-
- vpsrld $13,%ymm13,%ymm1
-
- vpslld $19,%ymm13,%ymm2
- vpaddd %ymm0,%ymm6,%ymm6
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm12,%ymm7
-
- vpsrld $22,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm13,%ymm2
- vpxor %ymm3,%ymm14,%ymm12
- vpaddd %ymm6,%ymm8,%ymm8
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm6,%ymm12,%ymm12
- vpaddd %ymm7,%ymm12,%ymm12
- vmovdqu 416-256-128(%rbx),%ymm6
- vpaddd 160-128(%rax),%ymm5,%ymm5
-
- vpsrld $3,%ymm6,%ymm7
- vpsrld $7,%ymm6,%ymm1
- vpslld $25,%ymm6,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm6,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm6,%ymm2
- vmovdqu 320-256-128(%rbx),%ymm0
- vpsrld $10,%ymm0,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm5,%ymm5
- vpxor %ymm1,%ymm3,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm5,%ymm5
- vpsrld $6,%ymm8,%ymm7
- vpslld $26,%ymm8,%ymm2
- vmovdqu %ymm5,384-256-128(%rbx)
- vpaddd %ymm11,%ymm5,%ymm5
-
- vpsrld $11,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm8,%ymm2
- vpaddd 0(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm8,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm8,%ymm2
- vpandn %ymm10,%ymm8,%ymm0
- vpand %ymm9,%ymm8,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm12,%ymm11
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm12,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm12,%ymm13,%ymm3
-
- vpxor %ymm1,%ymm11,%ymm11
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm12,%ymm1
-
- vpslld $19,%ymm12,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm11,%ymm7
-
- vpsrld $22,%ymm12,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm12,%ymm2
- vpxor %ymm4,%ymm13,%ymm11
- vpaddd %ymm5,%ymm15,%ymm15
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm11,%ymm11
- vpaddd %ymm7,%ymm11,%ymm11
- vmovdqu 448-256-128(%rbx),%ymm5
- vpaddd 192-128(%rax),%ymm6,%ymm6
-
- vpsrld $3,%ymm5,%ymm7
- vpsrld $7,%ymm5,%ymm1
- vpslld $25,%ymm5,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm5,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm5,%ymm2
- vmovdqu 352-256-128(%rbx),%ymm0
- vpsrld $10,%ymm0,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm6,%ymm6
- vpxor %ymm1,%ymm4,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm6,%ymm6
- vpsrld $6,%ymm15,%ymm7
- vpslld $26,%ymm15,%ymm2
- vmovdqu %ymm6,416-256-128(%rbx)
- vpaddd %ymm10,%ymm6,%ymm6
-
- vpsrld $11,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm15,%ymm2
- vpaddd 32(%rbp),%ymm6,%ymm6
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm15,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm15,%ymm2
- vpandn %ymm9,%ymm15,%ymm0
- vpand %ymm8,%ymm15,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm11,%ymm10
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm11,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm11,%ymm12,%ymm4
-
- vpxor %ymm1,%ymm10,%ymm10
- vpaddd %ymm7,%ymm6,%ymm6
-
- vpsrld $13,%ymm11,%ymm1
-
- vpslld $19,%ymm11,%ymm2
- vpaddd %ymm0,%ymm6,%ymm6
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm10,%ymm7
-
- vpsrld $22,%ymm11,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm11,%ymm2
- vpxor %ymm3,%ymm12,%ymm10
- vpaddd %ymm6,%ymm14,%ymm14
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm6,%ymm10,%ymm10
- vpaddd %ymm7,%ymm10,%ymm10
- vmovdqu 480-256-128(%rbx),%ymm6
- vpaddd 224-128(%rax),%ymm5,%ymm5
-
- vpsrld $3,%ymm6,%ymm7
- vpsrld $7,%ymm6,%ymm1
- vpslld $25,%ymm6,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm6,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm6,%ymm2
- vmovdqu 384-256-128(%rbx),%ymm0
- vpsrld $10,%ymm0,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm5,%ymm5
- vpxor %ymm1,%ymm3,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm5,%ymm5
- vpsrld $6,%ymm14,%ymm7
- vpslld $26,%ymm14,%ymm2
- vmovdqu %ymm5,448-256-128(%rbx)
- vpaddd %ymm9,%ymm5,%ymm5
-
- vpsrld $11,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm14,%ymm2
- vpaddd 64(%rbp),%ymm5,%ymm5
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm14,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm14,%ymm2
- vpandn %ymm8,%ymm14,%ymm0
- vpand %ymm15,%ymm14,%ymm3
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm10,%ymm9
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm10,%ymm1
- vpxor %ymm3,%ymm0,%ymm0
- vpxor %ymm10,%ymm11,%ymm3
-
- vpxor %ymm1,%ymm9,%ymm9
- vpaddd %ymm7,%ymm5,%ymm5
-
- vpsrld $13,%ymm10,%ymm1
-
- vpslld $19,%ymm10,%ymm2
- vpaddd %ymm0,%ymm5,%ymm5
- vpand %ymm3,%ymm4,%ymm4
-
- vpxor %ymm1,%ymm9,%ymm7
-
- vpsrld $22,%ymm10,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm10,%ymm2
- vpxor %ymm4,%ymm11,%ymm9
- vpaddd %ymm5,%ymm13,%ymm13
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm5,%ymm9,%ymm9
- vpaddd %ymm7,%ymm9,%ymm9
- vmovdqu 0-128(%rax),%ymm5
- vpaddd 256-256-128(%rbx),%ymm6,%ymm6
-
- vpsrld $3,%ymm5,%ymm7
- vpsrld $7,%ymm5,%ymm1
- vpslld $25,%ymm5,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $18,%ymm5,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $14,%ymm5,%ymm2
- vmovdqu 416-256-128(%rbx),%ymm0
- vpsrld $10,%ymm0,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
- vpsrld $17,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $15,%ymm0,%ymm2
- vpaddd %ymm7,%ymm6,%ymm6
- vpxor %ymm1,%ymm4,%ymm7
- vpsrld $19,%ymm0,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $13,%ymm0,%ymm2
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
- vpaddd %ymm7,%ymm6,%ymm6
- vpsrld $6,%ymm13,%ymm7
- vpslld $26,%ymm13,%ymm2
- vmovdqu %ymm6,480-256-128(%rbx)
- vpaddd %ymm8,%ymm6,%ymm6
-
- vpsrld $11,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
- vpslld $21,%ymm13,%ymm2
- vpaddd 96(%rbp),%ymm6,%ymm6
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $25,%ymm13,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $7,%ymm13,%ymm2
- vpandn %ymm15,%ymm13,%ymm0
- vpand %ymm14,%ymm13,%ymm4
-
- vpxor %ymm1,%ymm7,%ymm7
-
- vpsrld $2,%ymm9,%ymm8
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $30,%ymm9,%ymm1
- vpxor %ymm4,%ymm0,%ymm0
- vpxor %ymm9,%ymm10,%ymm4
-
- vpxor %ymm1,%ymm8,%ymm8
- vpaddd %ymm7,%ymm6,%ymm6
-
- vpsrld $13,%ymm9,%ymm1
-
- vpslld $19,%ymm9,%ymm2
- vpaddd %ymm0,%ymm6,%ymm6
- vpand %ymm4,%ymm3,%ymm3
-
- vpxor %ymm1,%ymm8,%ymm7
-
- vpsrld $22,%ymm9,%ymm1
- vpxor %ymm2,%ymm7,%ymm7
-
- vpslld $10,%ymm9,%ymm2
- vpxor %ymm3,%ymm10,%ymm8
- vpaddd %ymm6,%ymm12,%ymm12
-
- vpxor %ymm1,%ymm7,%ymm7
- vpxor %ymm2,%ymm7,%ymm7
-
- vpaddd %ymm6,%ymm8,%ymm8
- vpaddd %ymm7,%ymm8,%ymm8
- addq $256,%rbp
- decl %ecx
- jnz .Loop_16_xx_avx2
-
- movl $1,%ecx
- leaq 512(%rsp),%rbx
- leaq K256+128(%rip),%rbp
- cmpl 0(%rbx),%ecx
- cmovgeq %rbp,%r12
- cmpl 4(%rbx),%ecx
- cmovgeq %rbp,%r13
- cmpl 8(%rbx),%ecx
- cmovgeq %rbp,%r14
- cmpl 12(%rbx),%ecx
- cmovgeq %rbp,%r15
- cmpl 16(%rbx),%ecx
- cmovgeq %rbp,%r8
- cmpl 20(%rbx),%ecx
- cmovgeq %rbp,%r9
- cmpl 24(%rbx),%ecx
- cmovgeq %rbp,%r10
- cmpl 28(%rbx),%ecx
- cmovgeq %rbp,%r11
- vmovdqa (%rbx),%ymm7
- vpxor %ymm0,%ymm0,%ymm0
- vmovdqa %ymm7,%ymm6
- vpcmpgtd %ymm0,%ymm6,%ymm6
- vpaddd %ymm6,%ymm7,%ymm7
-
- vmovdqu 0-128(%rdi),%ymm0
- vpand %ymm6,%ymm8,%ymm8
- vmovdqu 32-128(%rdi),%ymm1
- vpand %ymm6,%ymm9,%ymm9
- vmovdqu 64-128(%rdi),%ymm2
- vpand %ymm6,%ymm10,%ymm10
- vmovdqu 96-128(%rdi),%ymm5
- vpand %ymm6,%ymm11,%ymm11
- vpaddd %ymm0,%ymm8,%ymm8
- vmovdqu 128-128(%rdi),%ymm0
- vpand %ymm6,%ymm12,%ymm12
- vpaddd %ymm1,%ymm9,%ymm9
- vmovdqu 160-128(%rdi),%ymm1
- vpand %ymm6,%ymm13,%ymm13
- vpaddd %ymm2,%ymm10,%ymm10
- vmovdqu 192-128(%rdi),%ymm2
- vpand %ymm6,%ymm14,%ymm14
- vpaddd %ymm5,%ymm11,%ymm11
- vmovdqu 224-128(%rdi),%ymm5
- vpand %ymm6,%ymm15,%ymm15
- vpaddd %ymm0,%ymm12,%ymm12
- vpaddd %ymm1,%ymm13,%ymm13
- vmovdqu %ymm8,0-128(%rdi)
- vpaddd %ymm2,%ymm14,%ymm14
- vmovdqu %ymm9,32-128(%rdi)
- vpaddd %ymm5,%ymm15,%ymm15
- vmovdqu %ymm10,64-128(%rdi)
- vmovdqu %ymm11,96-128(%rdi)
- vmovdqu %ymm12,128-128(%rdi)
- vmovdqu %ymm13,160-128(%rdi)
- vmovdqu %ymm14,192-128(%rdi)
- vmovdqu %ymm15,224-128(%rdi)
-
- vmovdqu %ymm7,(%rbx)
- leaq 256+128(%rsp),%rbx
- vmovdqu .Lpbswap(%rip),%ymm6
- decl %edx
- jnz .Loop_avx2
-
-
-
-
-
-
-
-.Ldone_avx2:
- movq 544(%rsp),%rax
-.cfi_def_cfa %rax,8
- vzeroupper
- movq -48(%rax),%r15
-.cfi_restore %r15
- movq -40(%rax),%r14
-.cfi_restore %r14
- movq -32(%rax),%r13
-.cfi_restore %r13
- movq -24(%rax),%r12
-.cfi_restore %r12
- movq -16(%rax),%rbp
-.cfi_restore %rbp
- movq -8(%rax),%rbx
-.cfi_restore %rbx
- leaq (%rax),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx2:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha256_multi_block_avx2,.-sha256_multi_block_avx2
.align 256
K256:
.long 1116352408,1116352408,1116352408,1116352408
diff --git a/secure/lib/libcrypto/amd64/sha256-x86_64.S b/secure/lib/libcrypto/amd64/sha256-x86_64.S
index 844df765f398e..91b3ead8976ab 100644
--- a/secure/lib/libcrypto/amd64/sha256-x86_64.S
+++ b/secure/lib/libcrypto/amd64/sha256-x86_64.S
@@ -14,14 +14,6 @@ sha256_block_data_order:
movl 8(%r11),%r11d
testl $536870912,%r11d
jnz _shaext_shortcut
- andl $296,%r11d
- cmpl $296,%r11d
- je .Lavx2_shortcut
- andl $1073741824,%r9d
- andl $268435968,%r10d
- orl %r9d,%r10d
- cmpl $1342177792,%r10d
- je .Lavx_shortcut
testl $512,%r10d
jnz .Lssse3_shortcut
movq %rsp,%rax
@@ -1777,6 +1769,7 @@ K256:
.align 64
sha256_block_data_order_shaext:
_shaext_shortcut:
+.cfi_startproc
leaq K256+128(%rip),%rcx
movdqu (%rdi),%xmm1
movdqu 16(%rdi),%xmm2
@@ -1979,6 +1972,7 @@ _shaext_shortcut:
movdqu %xmm1,(%rdi)
movdqu %xmm2,16(%rdi)
.byte 0xf3,0xc3
+.cfi_endproc
.size sha256_block_data_order_shaext,.-sha256_block_data_order_shaext
.type sha256_block_data_order_ssse3,@function
.align 64
@@ -3093,2340 +3087,3 @@ sha256_block_data_order_ssse3:
.byte 0xf3,0xc3
.cfi_endproc
.size sha256_block_data_order_ssse3,.-sha256_block_data_order_ssse3
-.type sha256_block_data_order_avx,@function
-.align 64
-sha256_block_data_order_avx:
-.cfi_startproc
-.Lavx_shortcut:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- shlq $4,%rdx
- subq $96,%rsp
- leaq (%rsi,%rdx,4),%rdx
- andq $-64,%rsp
- movq %rdi,64+0(%rsp)
- movq %rsi,64+8(%rsp)
- movq %rdx,64+16(%rsp)
- movq %rax,88(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0xd8,0x00,0x06,0x23,0x08
-.Lprologue_avx:
-
- vzeroupper
- movl 0(%rdi),%eax
- movl 4(%rdi),%ebx
- movl 8(%rdi),%ecx
- movl 12(%rdi),%edx
- movl 16(%rdi),%r8d
- movl 20(%rdi),%r9d
- movl 24(%rdi),%r10d
- movl 28(%rdi),%r11d
- vmovdqa K256+512+32(%rip),%xmm8
- vmovdqa K256+512+64(%rip),%xmm9
- jmp .Lloop_avx
-.align 16
-.Lloop_avx:
- vmovdqa K256+512(%rip),%xmm7
- vmovdqu 0(%rsi),%xmm0
- vmovdqu 16(%rsi),%xmm1
- vmovdqu 32(%rsi),%xmm2
- vmovdqu 48(%rsi),%xmm3
- vpshufb %xmm7,%xmm0,%xmm0
- leaq K256(%rip),%rbp
- vpshufb %xmm7,%xmm1,%xmm1
- vpshufb %xmm7,%xmm2,%xmm2
- vpaddd 0(%rbp),%xmm0,%xmm4
- vpshufb %xmm7,%xmm3,%xmm3
- vpaddd 32(%rbp),%xmm1,%xmm5
- vpaddd 64(%rbp),%xmm2,%xmm6
- vpaddd 96(%rbp),%xmm3,%xmm7
- vmovdqa %xmm4,0(%rsp)
- movl %eax,%r14d
- vmovdqa %xmm5,16(%rsp)
- movl %ebx,%edi
- vmovdqa %xmm6,32(%rsp)
- xorl %ecx,%edi
- vmovdqa %xmm7,48(%rsp)
- movl %r8d,%r13d
- jmp .Lavx_00_47
-
-.align 16
-.Lavx_00_47:
- subq $-128,%rbp
- vpalignr $4,%xmm0,%xmm1,%xmm4
- shrdl $14,%r13d,%r13d
- movl %r14d,%eax
- movl %r9d,%r12d
- vpalignr $4,%xmm2,%xmm3,%xmm7
- shrdl $9,%r14d,%r14d
- xorl %r8d,%r13d
- xorl %r10d,%r12d
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %eax,%r14d
- andl %r8d,%r12d
- vpaddd %xmm7,%xmm0,%xmm0
- xorl %r8d,%r13d
- addl 0(%rsp),%r11d
- movl %eax,%r15d
- vpsrld $3,%xmm4,%xmm7
- xorl %r10d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %ebx,%r15d
- vpslld $14,%xmm4,%xmm5
- addl %r12d,%r11d
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%r14d
- addl %r13d,%r11d
- xorl %ebx,%edi
- vpshufd $250,%xmm3,%xmm7
- shrdl $2,%r14d,%r14d
- addl %r11d,%edx
- addl %edi,%r11d
- vpsrld $11,%xmm6,%xmm6
- movl %edx,%r13d
- addl %r11d,%r14d
- shrdl $14,%r13d,%r13d
- vpxor %xmm5,%xmm4,%xmm4
- movl %r14d,%r11d
- movl %r8d,%r12d
- shrdl $9,%r14d,%r14d
- vpslld $11,%xmm5,%xmm5
- xorl %edx,%r13d
- xorl %r9d,%r12d
- shrdl $5,%r13d,%r13d
- vpxor %xmm6,%xmm4,%xmm4
- xorl %r11d,%r14d
- andl %edx,%r12d
- xorl %edx,%r13d
- vpsrld $10,%xmm7,%xmm6
- addl 4(%rsp),%r10d
- movl %r11d,%edi
- xorl %r9d,%r12d
- vpxor %xmm5,%xmm4,%xmm4
- shrdl $11,%r14d,%r14d
- xorl %eax,%edi
- addl %r12d,%r10d
- vpsrlq $17,%xmm7,%xmm7
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %r11d,%r14d
- vpaddd %xmm4,%xmm0,%xmm0
- addl %r13d,%r10d
- xorl %eax,%r15d
- shrdl $2,%r14d,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- addl %r10d,%ecx
- addl %r15d,%r10d
- movl %ecx,%r13d
- vpsrlq $2,%xmm7,%xmm7
- addl %r10d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r10d
- vpxor %xmm7,%xmm6,%xmm6
- movl %edx,%r12d
- shrdl $9,%r14d,%r14d
- xorl %ecx,%r13d
- vpshufb %xmm8,%xmm6,%xmm6
- xorl %r8d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r10d,%r14d
- vpaddd %xmm6,%xmm0,%xmm0
- andl %ecx,%r12d
- xorl %ecx,%r13d
- addl 8(%rsp),%r9d
- vpshufd $80,%xmm0,%xmm7
- movl %r10d,%r15d
- xorl %r8d,%r12d
- shrdl $11,%r14d,%r14d
- vpsrld $10,%xmm7,%xmm6
- xorl %r11d,%r15d
- addl %r12d,%r9d
- shrdl $6,%r13d,%r13d
- vpsrlq $17,%xmm7,%xmm7
- andl %r15d,%edi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- vpxor %xmm7,%xmm6,%xmm6
- xorl %r11d,%edi
- shrdl $2,%r14d,%r14d
- addl %r9d,%ebx
- vpsrlq $2,%xmm7,%xmm7
- addl %edi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $14,%r13d,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- vpshufb %xmm9,%xmm6,%xmm6
- shrdl $9,%r14d,%r14d
- xorl %ebx,%r13d
- xorl %edx,%r12d
- vpaddd %xmm6,%xmm0,%xmm0
- shrdl $5,%r13d,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- vpaddd 0(%rbp),%xmm0,%xmm6
- xorl %ebx,%r13d
- addl 12(%rsp),%r8d
- movl %r9d,%edi
- xorl %edx,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r10d,%edi
- addl %r12d,%r8d
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- shrdl $2,%r14d,%r14d
- addl %r8d,%eax
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- vmovdqa %xmm6,0(%rsp)
- vpalignr $4,%xmm1,%xmm2,%xmm4
- shrdl $14,%r13d,%r13d
- movl %r14d,%r8d
- movl %ebx,%r12d
- vpalignr $4,%xmm3,%xmm0,%xmm7
- shrdl $9,%r14d,%r14d
- xorl %eax,%r13d
- xorl %ecx,%r12d
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %r8d,%r14d
- andl %eax,%r12d
- vpaddd %xmm7,%xmm1,%xmm1
- xorl %eax,%r13d
- addl 16(%rsp),%edx
- movl %r8d,%r15d
- vpsrld $3,%xmm4,%xmm7
- xorl %ecx,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r9d,%r15d
- vpslld $14,%xmm4,%xmm5
- addl %r12d,%edx
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- vpxor %xmm6,%xmm7,%xmm4
- xorl %r8d,%r14d
- addl %r13d,%edx
- xorl %r9d,%edi
- vpshufd $250,%xmm0,%xmm7
- shrdl $2,%r14d,%r14d
- addl %edx,%r11d
- addl %edi,%edx
- vpsrld $11,%xmm6,%xmm6
- movl %r11d,%r13d
- addl %edx,%r14d
- shrdl $14,%r13d,%r13d
- vpxor %xmm5,%xmm4,%xmm4
- movl %r14d,%edx
- movl %eax,%r12d
- shrdl $9,%r14d,%r14d
- vpslld $11,%xmm5,%xmm5
- xorl %r11d,%r13d
- xorl %ebx,%r12d
- shrdl $5,%r13d,%r13d
- vpxor %xmm6,%xmm4,%xmm4
- xorl %edx,%r14d
- andl %r11d,%r12d
- xorl %r11d,%r13d
- vpsrld $10,%xmm7,%xmm6
- addl 20(%rsp),%ecx
- movl %edx,%edi
- xorl %ebx,%r12d
- vpxor %xmm5,%xmm4,%xmm4
- shrdl $11,%r14d,%r14d
- xorl %r8d,%edi
- addl %r12d,%ecx
- vpsrlq $17,%xmm7,%xmm7
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %edx,%r14d
- vpaddd %xmm4,%xmm1,%xmm1
- addl %r13d,%ecx
- xorl %r8d,%r15d
- shrdl $2,%r14d,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- addl %ecx,%r10d
- addl %r15d,%ecx
- movl %r10d,%r13d
- vpsrlq $2,%xmm7,%xmm7
- addl %ecx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- movl %r11d,%r12d
- shrdl $9,%r14d,%r14d
- xorl %r10d,%r13d
- vpshufb %xmm8,%xmm6,%xmm6
- xorl %eax,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ecx,%r14d
- vpaddd %xmm6,%xmm1,%xmm1
- andl %r10d,%r12d
- xorl %r10d,%r13d
- addl 24(%rsp),%ebx
- vpshufd $80,%xmm1,%xmm7
- movl %ecx,%r15d
- xorl %eax,%r12d
- shrdl $11,%r14d,%r14d
- vpsrld $10,%xmm7,%xmm6
- xorl %edx,%r15d
- addl %r12d,%ebx
- shrdl $6,%r13d,%r13d
- vpsrlq $17,%xmm7,%xmm7
- andl %r15d,%edi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- vpxor %xmm7,%xmm6,%xmm6
- xorl %edx,%edi
- shrdl $2,%r14d,%r14d
- addl %ebx,%r9d
- vpsrlq $2,%xmm7,%xmm7
- addl %edi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $14,%r13d,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- vpshufb %xmm9,%xmm6,%xmm6
- shrdl $9,%r14d,%r14d
- xorl %r9d,%r13d
- xorl %r11d,%r12d
- vpaddd %xmm6,%xmm1,%xmm1
- shrdl $5,%r13d,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- vpaddd 32(%rbp),%xmm1,%xmm6
- xorl %r9d,%r13d
- addl 28(%rsp),%eax
- movl %ebx,%edi
- xorl %r11d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %ecx,%edi
- addl %r12d,%eax
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- shrdl $2,%r14d,%r14d
- addl %eax,%r8d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- vmovdqa %xmm6,16(%rsp)
- vpalignr $4,%xmm2,%xmm3,%xmm4
- shrdl $14,%r13d,%r13d
- movl %r14d,%eax
- movl %r9d,%r12d
- vpalignr $4,%xmm0,%xmm1,%xmm7
- shrdl $9,%r14d,%r14d
- xorl %r8d,%r13d
- xorl %r10d,%r12d
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %eax,%r14d
- andl %r8d,%r12d
- vpaddd %xmm7,%xmm2,%xmm2
- xorl %r8d,%r13d
- addl 32(%rsp),%r11d
- movl %eax,%r15d
- vpsrld $3,%xmm4,%xmm7
- xorl %r10d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %ebx,%r15d
- vpslld $14,%xmm4,%xmm5
- addl %r12d,%r11d
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%r14d
- addl %r13d,%r11d
- xorl %ebx,%edi
- vpshufd $250,%xmm1,%xmm7
- shrdl $2,%r14d,%r14d
- addl %r11d,%edx
- addl %edi,%r11d
- vpsrld $11,%xmm6,%xmm6
- movl %edx,%r13d
- addl %r11d,%r14d
- shrdl $14,%r13d,%r13d
- vpxor %xmm5,%xmm4,%xmm4
- movl %r14d,%r11d
- movl %r8d,%r12d
- shrdl $9,%r14d,%r14d
- vpslld $11,%xmm5,%xmm5
- xorl %edx,%r13d
- xorl %r9d,%r12d
- shrdl $5,%r13d,%r13d
- vpxor %xmm6,%xmm4,%xmm4
- xorl %r11d,%r14d
- andl %edx,%r12d
- xorl %edx,%r13d
- vpsrld $10,%xmm7,%xmm6
- addl 36(%rsp),%r10d
- movl %r11d,%edi
- xorl %r9d,%r12d
- vpxor %xmm5,%xmm4,%xmm4
- shrdl $11,%r14d,%r14d
- xorl %eax,%edi
- addl %r12d,%r10d
- vpsrlq $17,%xmm7,%xmm7
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %r11d,%r14d
- vpaddd %xmm4,%xmm2,%xmm2
- addl %r13d,%r10d
- xorl %eax,%r15d
- shrdl $2,%r14d,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- addl %r10d,%ecx
- addl %r15d,%r10d
- movl %ecx,%r13d
- vpsrlq $2,%xmm7,%xmm7
- addl %r10d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r10d
- vpxor %xmm7,%xmm6,%xmm6
- movl %edx,%r12d
- shrdl $9,%r14d,%r14d
- xorl %ecx,%r13d
- vpshufb %xmm8,%xmm6,%xmm6
- xorl %r8d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r10d,%r14d
- vpaddd %xmm6,%xmm2,%xmm2
- andl %ecx,%r12d
- xorl %ecx,%r13d
- addl 40(%rsp),%r9d
- vpshufd $80,%xmm2,%xmm7
- movl %r10d,%r15d
- xorl %r8d,%r12d
- shrdl $11,%r14d,%r14d
- vpsrld $10,%xmm7,%xmm6
- xorl %r11d,%r15d
- addl %r12d,%r9d
- shrdl $6,%r13d,%r13d
- vpsrlq $17,%xmm7,%xmm7
- andl %r15d,%edi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- vpxor %xmm7,%xmm6,%xmm6
- xorl %r11d,%edi
- shrdl $2,%r14d,%r14d
- addl %r9d,%ebx
- vpsrlq $2,%xmm7,%xmm7
- addl %edi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $14,%r13d,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- vpshufb %xmm9,%xmm6,%xmm6
- shrdl $9,%r14d,%r14d
- xorl %ebx,%r13d
- xorl %edx,%r12d
- vpaddd %xmm6,%xmm2,%xmm2
- shrdl $5,%r13d,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- vpaddd 64(%rbp),%xmm2,%xmm6
- xorl %ebx,%r13d
- addl 44(%rsp),%r8d
- movl %r9d,%edi
- xorl %edx,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r10d,%edi
- addl %r12d,%r8d
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- shrdl $2,%r14d,%r14d
- addl %r8d,%eax
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- vmovdqa %xmm6,32(%rsp)
- vpalignr $4,%xmm3,%xmm0,%xmm4
- shrdl $14,%r13d,%r13d
- movl %r14d,%r8d
- movl %ebx,%r12d
- vpalignr $4,%xmm1,%xmm2,%xmm7
- shrdl $9,%r14d,%r14d
- xorl %eax,%r13d
- xorl %ecx,%r12d
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%r13d,%r13d
- xorl %r8d,%r14d
- andl %eax,%r12d
- vpaddd %xmm7,%xmm3,%xmm3
- xorl %eax,%r13d
- addl 48(%rsp),%edx
- movl %r8d,%r15d
- vpsrld $3,%xmm4,%xmm7
- xorl %ecx,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r9d,%r15d
- vpslld $14,%xmm4,%xmm5
- addl %r12d,%edx
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- vpxor %xmm6,%xmm7,%xmm4
- xorl %r8d,%r14d
- addl %r13d,%edx
- xorl %r9d,%edi
- vpshufd $250,%xmm2,%xmm7
- shrdl $2,%r14d,%r14d
- addl %edx,%r11d
- addl %edi,%edx
- vpsrld $11,%xmm6,%xmm6
- movl %r11d,%r13d
- addl %edx,%r14d
- shrdl $14,%r13d,%r13d
- vpxor %xmm5,%xmm4,%xmm4
- movl %r14d,%edx
- movl %eax,%r12d
- shrdl $9,%r14d,%r14d
- vpslld $11,%xmm5,%xmm5
- xorl %r11d,%r13d
- xorl %ebx,%r12d
- shrdl $5,%r13d,%r13d
- vpxor %xmm6,%xmm4,%xmm4
- xorl %edx,%r14d
- andl %r11d,%r12d
- xorl %r11d,%r13d
- vpsrld $10,%xmm7,%xmm6
- addl 52(%rsp),%ecx
- movl %edx,%edi
- xorl %ebx,%r12d
- vpxor %xmm5,%xmm4,%xmm4
- shrdl $11,%r14d,%r14d
- xorl %r8d,%edi
- addl %r12d,%ecx
- vpsrlq $17,%xmm7,%xmm7
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %edx,%r14d
- vpaddd %xmm4,%xmm3,%xmm3
- addl %r13d,%ecx
- xorl %r8d,%r15d
- shrdl $2,%r14d,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- addl %ecx,%r10d
- addl %r15d,%ecx
- movl %r10d,%r13d
- vpsrlq $2,%xmm7,%xmm7
- addl %ecx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- movl %r11d,%r12d
- shrdl $9,%r14d,%r14d
- xorl %r10d,%r13d
- vpshufb %xmm8,%xmm6,%xmm6
- xorl %eax,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ecx,%r14d
- vpaddd %xmm6,%xmm3,%xmm3
- andl %r10d,%r12d
- xorl %r10d,%r13d
- addl 56(%rsp),%ebx
- vpshufd $80,%xmm3,%xmm7
- movl %ecx,%r15d
- xorl %eax,%r12d
- shrdl $11,%r14d,%r14d
- vpsrld $10,%xmm7,%xmm6
- xorl %edx,%r15d
- addl %r12d,%ebx
- shrdl $6,%r13d,%r13d
- vpsrlq $17,%xmm7,%xmm7
- andl %r15d,%edi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- vpxor %xmm7,%xmm6,%xmm6
- xorl %edx,%edi
- shrdl $2,%r14d,%r14d
- addl %ebx,%r9d
- vpsrlq $2,%xmm7,%xmm7
- addl %edi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $14,%r13d,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- vpshufb %xmm9,%xmm6,%xmm6
- shrdl $9,%r14d,%r14d
- xorl %r9d,%r13d
- xorl %r11d,%r12d
- vpaddd %xmm6,%xmm3,%xmm3
- shrdl $5,%r13d,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- vpaddd 96(%rbp),%xmm3,%xmm6
- xorl %r9d,%r13d
- addl 60(%rsp),%eax
- movl %ebx,%edi
- xorl %r11d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %ecx,%edi
- addl %r12d,%eax
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- shrdl $2,%r14d,%r14d
- addl %eax,%r8d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- vmovdqa %xmm6,48(%rsp)
- cmpb $0,131(%rbp)
- jne .Lavx_00_47
- shrdl $14,%r13d,%r13d
- movl %r14d,%eax
- movl %r9d,%r12d
- shrdl $9,%r14d,%r14d
- xorl %r8d,%r13d
- xorl %r10d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %eax,%r14d
- andl %r8d,%r12d
- xorl %r8d,%r13d
- addl 0(%rsp),%r11d
- movl %eax,%r15d
- xorl %r10d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %ebx,%r15d
- addl %r12d,%r11d
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- xorl %eax,%r14d
- addl %r13d,%r11d
- xorl %ebx,%edi
- shrdl $2,%r14d,%r14d
- addl %r11d,%edx
- addl %edi,%r11d
- movl %edx,%r13d
- addl %r11d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r11d
- movl %r8d,%r12d
- shrdl $9,%r14d,%r14d
- xorl %edx,%r13d
- xorl %r9d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r11d,%r14d
- andl %edx,%r12d
- xorl %edx,%r13d
- addl 4(%rsp),%r10d
- movl %r11d,%edi
- xorl %r9d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %eax,%edi
- addl %r12d,%r10d
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %r11d,%r14d
- addl %r13d,%r10d
- xorl %eax,%r15d
- shrdl $2,%r14d,%r14d
- addl %r10d,%ecx
- addl %r15d,%r10d
- movl %ecx,%r13d
- addl %r10d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r10d
- movl %edx,%r12d
- shrdl $9,%r14d,%r14d
- xorl %ecx,%r13d
- xorl %r8d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r10d,%r14d
- andl %ecx,%r12d
- xorl %ecx,%r13d
- addl 8(%rsp),%r9d
- movl %r10d,%r15d
- xorl %r8d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r11d,%r15d
- addl %r12d,%r9d
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- xorl %r11d,%edi
- shrdl $2,%r14d,%r14d
- addl %r9d,%ebx
- addl %edi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- shrdl $9,%r14d,%r14d
- xorl %ebx,%r13d
- xorl %edx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- xorl %ebx,%r13d
- addl 12(%rsp),%r8d
- movl %r9d,%edi
- xorl %edx,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r10d,%edi
- addl %r12d,%r8d
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- shrdl $2,%r14d,%r14d
- addl %r8d,%eax
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r8d
- movl %ebx,%r12d
- shrdl $9,%r14d,%r14d
- xorl %eax,%r13d
- xorl %ecx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r8d,%r14d
- andl %eax,%r12d
- xorl %eax,%r13d
- addl 16(%rsp),%edx
- movl %r8d,%r15d
- xorl %ecx,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r9d,%r15d
- addl %r12d,%edx
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- xorl %r8d,%r14d
- addl %r13d,%edx
- xorl %r9d,%edi
- shrdl $2,%r14d,%r14d
- addl %edx,%r11d
- addl %edi,%edx
- movl %r11d,%r13d
- addl %edx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%edx
- movl %eax,%r12d
- shrdl $9,%r14d,%r14d
- xorl %r11d,%r13d
- xorl %ebx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %edx,%r14d
- andl %r11d,%r12d
- xorl %r11d,%r13d
- addl 20(%rsp),%ecx
- movl %edx,%edi
- xorl %ebx,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r8d,%edi
- addl %r12d,%ecx
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %edx,%r14d
- addl %r13d,%ecx
- xorl %r8d,%r15d
- shrdl $2,%r14d,%r14d
- addl %ecx,%r10d
- addl %r15d,%ecx
- movl %r10d,%r13d
- addl %ecx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ecx
- movl %r11d,%r12d
- shrdl $9,%r14d,%r14d
- xorl %r10d,%r13d
- xorl %eax,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ecx,%r14d
- andl %r10d,%r12d
- xorl %r10d,%r13d
- addl 24(%rsp),%ebx
- movl %ecx,%r15d
- xorl %eax,%r12d
- shrdl $11,%r14d,%r14d
- xorl %edx,%r15d
- addl %r12d,%ebx
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- xorl %edx,%edi
- shrdl $2,%r14d,%r14d
- addl %ebx,%r9d
- addl %edi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- shrdl $9,%r14d,%r14d
- xorl %r9d,%r13d
- xorl %r11d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- xorl %r9d,%r13d
- addl 28(%rsp),%eax
- movl %ebx,%edi
- xorl %r11d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %ecx,%edi
- addl %r12d,%eax
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- shrdl $2,%r14d,%r14d
- addl %eax,%r8d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%eax
- movl %r9d,%r12d
- shrdl $9,%r14d,%r14d
- xorl %r8d,%r13d
- xorl %r10d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %eax,%r14d
- andl %r8d,%r12d
- xorl %r8d,%r13d
- addl 32(%rsp),%r11d
- movl %eax,%r15d
- xorl %r10d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %ebx,%r15d
- addl %r12d,%r11d
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- xorl %eax,%r14d
- addl %r13d,%r11d
- xorl %ebx,%edi
- shrdl $2,%r14d,%r14d
- addl %r11d,%edx
- addl %edi,%r11d
- movl %edx,%r13d
- addl %r11d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r11d
- movl %r8d,%r12d
- shrdl $9,%r14d,%r14d
- xorl %edx,%r13d
- xorl %r9d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r11d,%r14d
- andl %edx,%r12d
- xorl %edx,%r13d
- addl 36(%rsp),%r10d
- movl %r11d,%edi
- xorl %r9d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %eax,%edi
- addl %r12d,%r10d
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %r11d,%r14d
- addl %r13d,%r10d
- xorl %eax,%r15d
- shrdl $2,%r14d,%r14d
- addl %r10d,%ecx
- addl %r15d,%r10d
- movl %ecx,%r13d
- addl %r10d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r10d
- movl %edx,%r12d
- shrdl $9,%r14d,%r14d
- xorl %ecx,%r13d
- xorl %r8d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r10d,%r14d
- andl %ecx,%r12d
- xorl %ecx,%r13d
- addl 40(%rsp),%r9d
- movl %r10d,%r15d
- xorl %r8d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r11d,%r15d
- addl %r12d,%r9d
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- xorl %r10d,%r14d
- addl %r13d,%r9d
- xorl %r11d,%edi
- shrdl $2,%r14d,%r14d
- addl %r9d,%ebx
- addl %edi,%r9d
- movl %ebx,%r13d
- addl %r9d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r9d
- movl %ecx,%r12d
- shrdl $9,%r14d,%r14d
- xorl %ebx,%r13d
- xorl %edx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r9d,%r14d
- andl %ebx,%r12d
- xorl %ebx,%r13d
- addl 44(%rsp),%r8d
- movl %r9d,%edi
- xorl %edx,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r10d,%edi
- addl %r12d,%r8d
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %r9d,%r14d
- addl %r13d,%r8d
- xorl %r10d,%r15d
- shrdl $2,%r14d,%r14d
- addl %r8d,%eax
- addl %r15d,%r8d
- movl %eax,%r13d
- addl %r8d,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%r8d
- movl %ebx,%r12d
- shrdl $9,%r14d,%r14d
- xorl %eax,%r13d
- xorl %ecx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %r8d,%r14d
- andl %eax,%r12d
- xorl %eax,%r13d
- addl 48(%rsp),%edx
- movl %r8d,%r15d
- xorl %ecx,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r9d,%r15d
- addl %r12d,%edx
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- xorl %r8d,%r14d
- addl %r13d,%edx
- xorl %r9d,%edi
- shrdl $2,%r14d,%r14d
- addl %edx,%r11d
- addl %edi,%edx
- movl %r11d,%r13d
- addl %edx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%edx
- movl %eax,%r12d
- shrdl $9,%r14d,%r14d
- xorl %r11d,%r13d
- xorl %ebx,%r12d
- shrdl $5,%r13d,%r13d
- xorl %edx,%r14d
- andl %r11d,%r12d
- xorl %r11d,%r13d
- addl 52(%rsp),%ecx
- movl %edx,%edi
- xorl %ebx,%r12d
- shrdl $11,%r14d,%r14d
- xorl %r8d,%edi
- addl %r12d,%ecx
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %edx,%r14d
- addl %r13d,%ecx
- xorl %r8d,%r15d
- shrdl $2,%r14d,%r14d
- addl %ecx,%r10d
- addl %r15d,%ecx
- movl %r10d,%r13d
- addl %ecx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ecx
- movl %r11d,%r12d
- shrdl $9,%r14d,%r14d
- xorl %r10d,%r13d
- xorl %eax,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ecx,%r14d
- andl %r10d,%r12d
- xorl %r10d,%r13d
- addl 56(%rsp),%ebx
- movl %ecx,%r15d
- xorl %eax,%r12d
- shrdl $11,%r14d,%r14d
- xorl %edx,%r15d
- addl %r12d,%ebx
- shrdl $6,%r13d,%r13d
- andl %r15d,%edi
- xorl %ecx,%r14d
- addl %r13d,%ebx
- xorl %edx,%edi
- shrdl $2,%r14d,%r14d
- addl %ebx,%r9d
- addl %edi,%ebx
- movl %r9d,%r13d
- addl %ebx,%r14d
- shrdl $14,%r13d,%r13d
- movl %r14d,%ebx
- movl %r10d,%r12d
- shrdl $9,%r14d,%r14d
- xorl %r9d,%r13d
- xorl %r11d,%r12d
- shrdl $5,%r13d,%r13d
- xorl %ebx,%r14d
- andl %r9d,%r12d
- xorl %r9d,%r13d
- addl 60(%rsp),%eax
- movl %ebx,%edi
- xorl %r11d,%r12d
- shrdl $11,%r14d,%r14d
- xorl %ecx,%edi
- addl %r12d,%eax
- shrdl $6,%r13d,%r13d
- andl %edi,%r15d
- xorl %ebx,%r14d
- addl %r13d,%eax
- xorl %ecx,%r15d
- shrdl $2,%r14d,%r14d
- addl %eax,%r8d
- addl %r15d,%eax
- movl %r8d,%r13d
- addl %eax,%r14d
- movq 64+0(%rsp),%rdi
- movl %r14d,%eax
-
- addl 0(%rdi),%eax
- leaq 64(%rsi),%rsi
- addl 4(%rdi),%ebx
- addl 8(%rdi),%ecx
- addl 12(%rdi),%edx
- addl 16(%rdi),%r8d
- addl 20(%rdi),%r9d
- addl 24(%rdi),%r10d
- addl 28(%rdi),%r11d
-
- cmpq 64+16(%rsp),%rsi
-
- movl %eax,0(%rdi)
- movl %ebx,4(%rdi)
- movl %ecx,8(%rdi)
- movl %edx,12(%rdi)
- movl %r8d,16(%rdi)
- movl %r9d,20(%rdi)
- movl %r10d,24(%rdi)
- movl %r11d,28(%rdi)
- jb .Lloop_avx
-
- movq 88(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- vzeroupper
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha256_block_data_order_avx,.-sha256_block_data_order_avx
-.type sha256_block_data_order_avx2,@function
-.align 64
-sha256_block_data_order_avx2:
-.cfi_startproc
-.Lavx2_shortcut:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- subq $544,%rsp
- shlq $4,%rdx
- andq $-1024,%rsp
- leaq (%rsi,%rdx,4),%rdx
- addq $448,%rsp
- movq %rdi,64+0(%rsp)
- movq %rsi,64+8(%rsp)
- movq %rdx,64+16(%rsp)
- movq %rax,88(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0xd8,0x00,0x06,0x23,0x08
-.Lprologue_avx2:
-
- vzeroupper
- subq $-64,%rsi
- movl 0(%rdi),%eax
- movq %rsi,%r12
- movl 4(%rdi),%ebx
- cmpq %rdx,%rsi
- movl 8(%rdi),%ecx
- cmoveq %rsp,%r12
- movl 12(%rdi),%edx
- movl 16(%rdi),%r8d
- movl 20(%rdi),%r9d
- movl 24(%rdi),%r10d
- movl 28(%rdi),%r11d
- vmovdqa K256+512+32(%rip),%ymm8
- vmovdqa K256+512+64(%rip),%ymm9
- jmp .Loop_avx2
-.align 16
-.Loop_avx2:
- vmovdqa K256+512(%rip),%ymm7
- vmovdqu -64+0(%rsi),%xmm0
- vmovdqu -64+16(%rsi),%xmm1
- vmovdqu -64+32(%rsi),%xmm2
- vmovdqu -64+48(%rsi),%xmm3
-
- vinserti128 $1,(%r12),%ymm0,%ymm0
- vinserti128 $1,16(%r12),%ymm1,%ymm1
- vpshufb %ymm7,%ymm0,%ymm0
- vinserti128 $1,32(%r12),%ymm2,%ymm2
- vpshufb %ymm7,%ymm1,%ymm1
- vinserti128 $1,48(%r12),%ymm3,%ymm3
-
- leaq K256(%rip),%rbp
- vpshufb %ymm7,%ymm2,%ymm2
- vpaddd 0(%rbp),%ymm0,%ymm4
- vpshufb %ymm7,%ymm3,%ymm3
- vpaddd 32(%rbp),%ymm1,%ymm5
- vpaddd 64(%rbp),%ymm2,%ymm6
- vpaddd 96(%rbp),%ymm3,%ymm7
- vmovdqa %ymm4,0(%rsp)
- xorl %r14d,%r14d
- vmovdqa %ymm5,32(%rsp)
- leaq -64(%rsp),%rsp
- movl %ebx,%edi
- vmovdqa %ymm6,0(%rsp)
- xorl %ecx,%edi
- vmovdqa %ymm7,32(%rsp)
- movl %r9d,%r12d
- subq $-32*4,%rbp
- jmp .Lavx2_00_47
-
-.align 16
-.Lavx2_00_47:
- leaq -64(%rsp),%rsp
- vpalignr $4,%ymm0,%ymm1,%ymm4
- addl 0+128(%rsp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- vpalignr $4,%ymm2,%ymm3,%ymm7
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- vpsrld $7,%ymm4,%ymm6
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- vpaddd %ymm7,%ymm0,%ymm0
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- vpsrld $3,%ymm4,%ymm7
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- vpslld $14,%ymm4,%ymm5
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- vpxor %ymm6,%ymm7,%ymm4
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %ebx,%edi
- vpshufd $250,%ymm3,%ymm7
- xorl %r13d,%r14d
- leal (%r11,%rdi,1),%r11d
- movl %r8d,%r12d
- vpsrld $11,%ymm6,%ymm6
- addl 4+128(%rsp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $11,%edx,%edi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- vpslld $11,%ymm5,%ymm5
- andnl %r9d,%edx,%r12d
- xorl %edi,%r13d
- rorxl $6,%edx,%r14d
- vpxor %ymm6,%ymm4,%ymm4
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%edi
- vpsrld $10,%ymm7,%ymm6
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%edi
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- vpsrlq $17,%ymm7,%ymm7
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %eax,%r15d
- vpaddd %ymm4,%ymm0,%ymm0
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 8+128(%rsp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- vpxor %ymm7,%ymm6,%ymm6
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- vpshufb %ymm8,%ymm6,%ymm6
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- vpaddd %ymm6,%ymm0,%ymm0
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- vpshufd $80,%ymm0,%ymm7
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- vpsrld $10,%ymm7,%ymm6
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %r11d,%edi
- vpsrlq $17,%ymm7,%ymm7
- xorl %r13d,%r14d
- leal (%r9,%rdi,1),%r9d
- movl %ecx,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 12+128(%rsp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%ebx,%edi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- vpxor %ymm7,%ymm6,%ymm6
- andnl %edx,%ebx,%r12d
- xorl %edi,%r13d
- rorxl $6,%ebx,%r14d
- vpshufb %ymm9,%ymm6,%ymm6
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%edi
- vpaddd %ymm6,%ymm0,%ymm0
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%edi
- vpaddd 0(%rbp),%ymm0,%ymm6
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- vmovdqa %ymm6,0(%rsp)
- vpalignr $4,%ymm1,%ymm2,%ymm4
- addl 32+128(%rsp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- vpalignr $4,%ymm3,%ymm0,%ymm7
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- vpsrld $7,%ymm4,%ymm6
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- vpaddd %ymm7,%ymm1,%ymm1
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- vpsrld $3,%ymm4,%ymm7
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- vpslld $14,%ymm4,%ymm5
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- vpxor %ymm6,%ymm7,%ymm4
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %r9d,%edi
- vpshufd $250,%ymm0,%ymm7
- xorl %r13d,%r14d
- leal (%rdx,%rdi,1),%edx
- movl %eax,%r12d
- vpsrld $11,%ymm6,%ymm6
- addl 36+128(%rsp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $11,%r11d,%edi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- vpslld $11,%ymm5,%ymm5
- andnl %ebx,%r11d,%r12d
- xorl %edi,%r13d
- rorxl $6,%r11d,%r14d
- vpxor %ymm6,%ymm4,%ymm4
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%edi
- vpsrld $10,%ymm7,%ymm6
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%edi
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- vpsrlq $17,%ymm7,%ymm7
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- vpaddd %ymm4,%ymm1,%ymm1
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 40+128(%rsp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- vpxor %ymm7,%ymm6,%ymm6
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- vpshufb %ymm8,%ymm6,%ymm6
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- vpaddd %ymm6,%ymm1,%ymm1
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- vpshufd $80,%ymm1,%ymm7
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- vpsrld $10,%ymm7,%ymm6
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %edx,%edi
- vpsrlq $17,%ymm7,%ymm7
- xorl %r13d,%r14d
- leal (%rbx,%rdi,1),%ebx
- movl %r10d,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 44+128(%rsp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%r9d,%edi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- vpxor %ymm7,%ymm6,%ymm6
- andnl %r11d,%r9d,%r12d
- xorl %edi,%r13d
- rorxl $6,%r9d,%r14d
- vpshufb %ymm9,%ymm6,%ymm6
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%edi
- vpaddd %ymm6,%ymm1,%ymm1
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%edi
- vpaddd 32(%rbp),%ymm1,%ymm6
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- vmovdqa %ymm6,32(%rsp)
- leaq -64(%rsp),%rsp
- vpalignr $4,%ymm2,%ymm3,%ymm4
- addl 0+128(%rsp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- vpalignr $4,%ymm0,%ymm1,%ymm7
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- vpsrld $7,%ymm4,%ymm6
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- vpaddd %ymm7,%ymm2,%ymm2
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- vpsrld $3,%ymm4,%ymm7
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- vpslld $14,%ymm4,%ymm5
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- vpxor %ymm6,%ymm7,%ymm4
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %ebx,%edi
- vpshufd $250,%ymm1,%ymm7
- xorl %r13d,%r14d
- leal (%r11,%rdi,1),%r11d
- movl %r8d,%r12d
- vpsrld $11,%ymm6,%ymm6
- addl 4+128(%rsp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $11,%edx,%edi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- vpslld $11,%ymm5,%ymm5
- andnl %r9d,%edx,%r12d
- xorl %edi,%r13d
- rorxl $6,%edx,%r14d
- vpxor %ymm6,%ymm4,%ymm4
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%edi
- vpsrld $10,%ymm7,%ymm6
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%edi
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- vpsrlq $17,%ymm7,%ymm7
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %eax,%r15d
- vpaddd %ymm4,%ymm2,%ymm2
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 8+128(%rsp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- vpxor %ymm7,%ymm6,%ymm6
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- vpshufb %ymm8,%ymm6,%ymm6
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- vpaddd %ymm6,%ymm2,%ymm2
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- vpshufd $80,%ymm2,%ymm7
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- vpsrld $10,%ymm7,%ymm6
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %r11d,%edi
- vpsrlq $17,%ymm7,%ymm7
- xorl %r13d,%r14d
- leal (%r9,%rdi,1),%r9d
- movl %ecx,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 12+128(%rsp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%ebx,%edi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- vpxor %ymm7,%ymm6,%ymm6
- andnl %edx,%ebx,%r12d
- xorl %edi,%r13d
- rorxl $6,%ebx,%r14d
- vpshufb %ymm9,%ymm6,%ymm6
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%edi
- vpaddd %ymm6,%ymm2,%ymm2
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%edi
- vpaddd 64(%rbp),%ymm2,%ymm6
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- vmovdqa %ymm6,0(%rsp)
- vpalignr $4,%ymm3,%ymm0,%ymm4
- addl 32+128(%rsp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- vpalignr $4,%ymm1,%ymm2,%ymm7
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- vpsrld $7,%ymm4,%ymm6
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- vpaddd %ymm7,%ymm3,%ymm3
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- vpsrld $3,%ymm4,%ymm7
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- vpslld $14,%ymm4,%ymm5
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- vpxor %ymm6,%ymm7,%ymm4
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %r9d,%edi
- vpshufd $250,%ymm2,%ymm7
- xorl %r13d,%r14d
- leal (%rdx,%rdi,1),%edx
- movl %eax,%r12d
- vpsrld $11,%ymm6,%ymm6
- addl 36+128(%rsp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $11,%r11d,%edi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- vpslld $11,%ymm5,%ymm5
- andnl %ebx,%r11d,%r12d
- xorl %edi,%r13d
- rorxl $6,%r11d,%r14d
- vpxor %ymm6,%ymm4,%ymm4
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%edi
- vpsrld $10,%ymm7,%ymm6
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%edi
- vpxor %ymm5,%ymm4,%ymm4
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- vpsrlq $17,%ymm7,%ymm7
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- vpaddd %ymm4,%ymm3,%ymm3
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 40+128(%rsp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- vpxor %ymm7,%ymm6,%ymm6
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- vpshufb %ymm8,%ymm6,%ymm6
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- vpaddd %ymm6,%ymm3,%ymm3
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- vpshufd $80,%ymm3,%ymm7
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- vpsrld $10,%ymm7,%ymm6
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %edx,%edi
- vpsrlq $17,%ymm7,%ymm7
- xorl %r13d,%r14d
- leal (%rbx,%rdi,1),%ebx
- movl %r10d,%r12d
- vpxor %ymm7,%ymm6,%ymm6
- addl 44+128(%rsp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- vpsrlq $2,%ymm7,%ymm7
- rorxl $11,%r9d,%edi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- vpxor %ymm7,%ymm6,%ymm6
- andnl %r11d,%r9d,%r12d
- xorl %edi,%r13d
- rorxl $6,%r9d,%r14d
- vpshufb %ymm9,%ymm6,%ymm6
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%edi
- vpaddd %ymm6,%ymm3,%ymm3
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%edi
- vpaddd 96(%rbp),%ymm3,%ymm6
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- vmovdqa %ymm6,32(%rsp)
- leaq 128(%rbp),%rbp
- cmpb $0,3(%rbp)
- jne .Lavx2_00_47
- addl 0+64(%rsp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %ebx,%edi
- xorl %r13d,%r14d
- leal (%r11,%rdi,1),%r11d
- movl %r8d,%r12d
- addl 4+64(%rsp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- rorxl $11,%edx,%edi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- andnl %r9d,%edx,%r12d
- xorl %edi,%r13d
- rorxl $6,%edx,%r14d
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%edi
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%edi
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %eax,%r15d
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- addl 8+64(%rsp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %r11d,%edi
- xorl %r13d,%r14d
- leal (%r9,%rdi,1),%r9d
- movl %ecx,%r12d
- addl 12+64(%rsp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- rorxl $11,%ebx,%edi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- andnl %edx,%ebx,%r12d
- xorl %edi,%r13d
- rorxl $6,%ebx,%r14d
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%edi
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%edi
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- addl 32+64(%rsp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %r9d,%edi
- xorl %r13d,%r14d
- leal (%rdx,%rdi,1),%edx
- movl %eax,%r12d
- addl 36+64(%rsp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- rorxl $11,%r11d,%edi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- andnl %ebx,%r11d,%r12d
- xorl %edi,%r13d
- rorxl $6,%r11d,%r14d
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%edi
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%edi
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- addl 40+64(%rsp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %edx,%edi
- xorl %r13d,%r14d
- leal (%rbx,%rdi,1),%ebx
- movl %r10d,%r12d
- addl 44+64(%rsp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- rorxl $11,%r9d,%edi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- andnl %r11d,%r9d,%r12d
- xorl %edi,%r13d
- rorxl $6,%r9d,%r14d
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%edi
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%edi
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- addl 0(%rsp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %ebx,%edi
- xorl %r13d,%r14d
- leal (%r11,%rdi,1),%r11d
- movl %r8d,%r12d
- addl 4(%rsp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- rorxl $11,%edx,%edi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- andnl %r9d,%edx,%r12d
- xorl %edi,%r13d
- rorxl $6,%edx,%r14d
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%edi
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%edi
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %eax,%r15d
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- addl 8(%rsp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %r11d,%edi
- xorl %r13d,%r14d
- leal (%r9,%rdi,1),%r9d
- movl %ecx,%r12d
- addl 12(%rsp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- rorxl $11,%ebx,%edi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- andnl %edx,%ebx,%r12d
- xorl %edi,%r13d
- rorxl $6,%ebx,%r14d
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%edi
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%edi
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- addl 32(%rsp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %r9d,%edi
- xorl %r13d,%r14d
- leal (%rdx,%rdi,1),%edx
- movl %eax,%r12d
- addl 36(%rsp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- rorxl $11,%r11d,%edi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- andnl %ebx,%r11d,%r12d
- xorl %edi,%r13d
- rorxl $6,%r11d,%r14d
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%edi
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%edi
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- addl 40(%rsp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %edx,%edi
- xorl %r13d,%r14d
- leal (%rbx,%rdi,1),%ebx
- movl %r10d,%r12d
- addl 44(%rsp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- rorxl $11,%r9d,%edi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- andnl %r11d,%r9d,%r12d
- xorl %edi,%r13d
- rorxl $6,%r9d,%r14d
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%edi
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%edi
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- movq 512(%rsp),%rdi
- addl %r14d,%eax
-
- leaq 448(%rsp),%rbp
-
- addl 0(%rdi),%eax
- addl 4(%rdi),%ebx
- addl 8(%rdi),%ecx
- addl 12(%rdi),%edx
- addl 16(%rdi),%r8d
- addl 20(%rdi),%r9d
- addl 24(%rdi),%r10d
- addl 28(%rdi),%r11d
-
- movl %eax,0(%rdi)
- movl %ebx,4(%rdi)
- movl %ecx,8(%rdi)
- movl %edx,12(%rdi)
- movl %r8d,16(%rdi)
- movl %r9d,20(%rdi)
- movl %r10d,24(%rdi)
- movl %r11d,28(%rdi)
-
- cmpq 80(%rbp),%rsi
- je .Ldone_avx2
-
- xorl %r14d,%r14d
- movl %ebx,%edi
- xorl %ecx,%edi
- movl %r9d,%r12d
- jmp .Lower_avx2
-.align 16
-.Lower_avx2:
- addl 0+16(%rbp),%r11d
- andl %r8d,%r12d
- rorxl $25,%r8d,%r13d
- rorxl $11,%r8d,%r15d
- leal (%rax,%r14,1),%eax
- leal (%r11,%r12,1),%r11d
- andnl %r10d,%r8d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r8d,%r14d
- leal (%r11,%r12,1),%r11d
- xorl %r14d,%r13d
- movl %eax,%r15d
- rorxl $22,%eax,%r12d
- leal (%r11,%r13,1),%r11d
- xorl %ebx,%r15d
- rorxl $13,%eax,%r14d
- rorxl $2,%eax,%r13d
- leal (%rdx,%r11,1),%edx
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %ebx,%edi
- xorl %r13d,%r14d
- leal (%r11,%rdi,1),%r11d
- movl %r8d,%r12d
- addl 4+16(%rbp),%r10d
- andl %edx,%r12d
- rorxl $25,%edx,%r13d
- rorxl $11,%edx,%edi
- leal (%r11,%r14,1),%r11d
- leal (%r10,%r12,1),%r10d
- andnl %r9d,%edx,%r12d
- xorl %edi,%r13d
- rorxl $6,%edx,%r14d
- leal (%r10,%r12,1),%r10d
- xorl %r14d,%r13d
- movl %r11d,%edi
- rorxl $22,%r11d,%r12d
- leal (%r10,%r13,1),%r10d
- xorl %eax,%edi
- rorxl $13,%r11d,%r14d
- rorxl $2,%r11d,%r13d
- leal (%rcx,%r10,1),%ecx
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %eax,%r15d
- xorl %r13d,%r14d
- leal (%r10,%r15,1),%r10d
- movl %edx,%r12d
- addl 8+16(%rbp),%r9d
- andl %ecx,%r12d
- rorxl $25,%ecx,%r13d
- rorxl $11,%ecx,%r15d
- leal (%r10,%r14,1),%r10d
- leal (%r9,%r12,1),%r9d
- andnl %r8d,%ecx,%r12d
- xorl %r15d,%r13d
- rorxl $6,%ecx,%r14d
- leal (%r9,%r12,1),%r9d
- xorl %r14d,%r13d
- movl %r10d,%r15d
- rorxl $22,%r10d,%r12d
- leal (%r9,%r13,1),%r9d
- xorl %r11d,%r15d
- rorxl $13,%r10d,%r14d
- rorxl $2,%r10d,%r13d
- leal (%rbx,%r9,1),%ebx
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %r11d,%edi
- xorl %r13d,%r14d
- leal (%r9,%rdi,1),%r9d
- movl %ecx,%r12d
- addl 12+16(%rbp),%r8d
- andl %ebx,%r12d
- rorxl $25,%ebx,%r13d
- rorxl $11,%ebx,%edi
- leal (%r9,%r14,1),%r9d
- leal (%r8,%r12,1),%r8d
- andnl %edx,%ebx,%r12d
- xorl %edi,%r13d
- rorxl $6,%ebx,%r14d
- leal (%r8,%r12,1),%r8d
- xorl %r14d,%r13d
- movl %r9d,%edi
- rorxl $22,%r9d,%r12d
- leal (%r8,%r13,1),%r8d
- xorl %r10d,%edi
- rorxl $13,%r9d,%r14d
- rorxl $2,%r9d,%r13d
- leal (%rax,%r8,1),%eax
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %r10d,%r15d
- xorl %r13d,%r14d
- leal (%r8,%r15,1),%r8d
- movl %ebx,%r12d
- addl 32+16(%rbp),%edx
- andl %eax,%r12d
- rorxl $25,%eax,%r13d
- rorxl $11,%eax,%r15d
- leal (%r8,%r14,1),%r8d
- leal (%rdx,%r12,1),%edx
- andnl %ecx,%eax,%r12d
- xorl %r15d,%r13d
- rorxl $6,%eax,%r14d
- leal (%rdx,%r12,1),%edx
- xorl %r14d,%r13d
- movl %r8d,%r15d
- rorxl $22,%r8d,%r12d
- leal (%rdx,%r13,1),%edx
- xorl %r9d,%r15d
- rorxl $13,%r8d,%r14d
- rorxl $2,%r8d,%r13d
- leal (%r11,%rdx,1),%r11d
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %r9d,%edi
- xorl %r13d,%r14d
- leal (%rdx,%rdi,1),%edx
- movl %eax,%r12d
- addl 36+16(%rbp),%ecx
- andl %r11d,%r12d
- rorxl $25,%r11d,%r13d
- rorxl $11,%r11d,%edi
- leal (%rdx,%r14,1),%edx
- leal (%rcx,%r12,1),%ecx
- andnl %ebx,%r11d,%r12d
- xorl %edi,%r13d
- rorxl $6,%r11d,%r14d
- leal (%rcx,%r12,1),%ecx
- xorl %r14d,%r13d
- movl %edx,%edi
- rorxl $22,%edx,%r12d
- leal (%rcx,%r13,1),%ecx
- xorl %r8d,%edi
- rorxl $13,%edx,%r14d
- rorxl $2,%edx,%r13d
- leal (%r10,%rcx,1),%r10d
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %r8d,%r15d
- xorl %r13d,%r14d
- leal (%rcx,%r15,1),%ecx
- movl %r11d,%r12d
- addl 40+16(%rbp),%ebx
- andl %r10d,%r12d
- rorxl $25,%r10d,%r13d
- rorxl $11,%r10d,%r15d
- leal (%rcx,%r14,1),%ecx
- leal (%rbx,%r12,1),%ebx
- andnl %eax,%r10d,%r12d
- xorl %r15d,%r13d
- rorxl $6,%r10d,%r14d
- leal (%rbx,%r12,1),%ebx
- xorl %r14d,%r13d
- movl %ecx,%r15d
- rorxl $22,%ecx,%r12d
- leal (%rbx,%r13,1),%ebx
- xorl %edx,%r15d
- rorxl $13,%ecx,%r14d
- rorxl $2,%ecx,%r13d
- leal (%r9,%rbx,1),%r9d
- andl %r15d,%edi
- xorl %r12d,%r14d
- xorl %edx,%edi
- xorl %r13d,%r14d
- leal (%rbx,%rdi,1),%ebx
- movl %r10d,%r12d
- addl 44+16(%rbp),%eax
- andl %r9d,%r12d
- rorxl $25,%r9d,%r13d
- rorxl $11,%r9d,%edi
- leal (%rbx,%r14,1),%ebx
- leal (%rax,%r12,1),%eax
- andnl %r11d,%r9d,%r12d
- xorl %edi,%r13d
- rorxl $6,%r9d,%r14d
- leal (%rax,%r12,1),%eax
- xorl %r14d,%r13d
- movl %ebx,%edi
- rorxl $22,%ebx,%r12d
- leal (%rax,%r13,1),%eax
- xorl %ecx,%edi
- rorxl $13,%ebx,%r14d
- rorxl $2,%ebx,%r13d
- leal (%r8,%rax,1),%r8d
- andl %edi,%r15d
- xorl %r12d,%r14d
- xorl %ecx,%r15d
- xorl %r13d,%r14d
- leal (%rax,%r15,1),%eax
- movl %r9d,%r12d
- leaq -64(%rbp),%rbp
- cmpq %rsp,%rbp
- jae .Lower_avx2
-
- movq 512(%rsp),%rdi
- addl %r14d,%eax
-
- leaq 448(%rsp),%rsp
-
- addl 0(%rdi),%eax
- addl 4(%rdi),%ebx
- addl 8(%rdi),%ecx
- addl 12(%rdi),%edx
- addl 16(%rdi),%r8d
- addl 20(%rdi),%r9d
- leaq 128(%rsi),%rsi
- addl 24(%rdi),%r10d
- movq %rsi,%r12
- addl 28(%rdi),%r11d
- cmpq 64+16(%rsp),%rsi
-
- movl %eax,0(%rdi)
- cmoveq %rsp,%r12
- movl %ebx,4(%rdi)
- movl %ecx,8(%rdi)
- movl %edx,12(%rdi)
- movl %r8d,16(%rdi)
- movl %r9d,20(%rdi)
- movl %r10d,24(%rdi)
- movl %r11d,28(%rdi)
-
- jbe .Loop_avx2
- leaq (%rsp),%rbp
-
-.Ldone_avx2:
- leaq (%rbp),%rsp
- movq 88(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- vzeroupper
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx2:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha256_block_data_order_avx2,.-sha256_block_data_order_avx2
diff --git a/secure/lib/libcrypto/amd64/sha512-x86_64.S b/secure/lib/libcrypto/amd64/sha512-x86_64.S
index 095a9bbb0f347..a9b971a1b7cda 100644
--- a/secure/lib/libcrypto/amd64/sha512-x86_64.S
+++ b/secure/lib/libcrypto/amd64/sha512-x86_64.S
@@ -8,20 +8,6 @@
.align 16
sha512_block_data_order:
.cfi_startproc
- leaq OPENSSL_ia32cap_P(%rip),%r11
- movl 0(%r11),%r9d
- movl 4(%r11),%r10d
- movl 8(%r11),%r11d
- testl $2048,%r10d
- jnz .Lxop_shortcut
- andl $296,%r11d
- cmpl $296,%r11d
- je .Lavx2_shortcut
- andl $1073741824,%r9d
- andl $268435968,%r10d
- orl %r9d,%r10d
- cmpl $1342177792,%r10d
- je .Lavx_shortcut
movq %rsp,%rax
.cfi_def_cfa_register %rax
pushq %rbx
@@ -1815,3625 +1801,3 @@ K512:
.quad 0x0001020304050607,0x08090a0b0c0d0e0f
.quad 0x0001020304050607,0x08090a0b0c0d0e0f
.byte 83,72,65,53,49,50,32,98,108,111,99,107,32,116,114,97,110,115,102,111,114,109,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
-.type sha512_block_data_order_xop,@function
-.align 64
-sha512_block_data_order_xop:
-.cfi_startproc
-.Lxop_shortcut:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- shlq $4,%rdx
- subq $160,%rsp
- leaq (%rsi,%rdx,8),%rdx
- andq $-64,%rsp
- movq %rdi,128+0(%rsp)
- movq %rsi,128+8(%rsp)
- movq %rdx,128+16(%rsp)
- movq %rax,152(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0x98,0x01,0x06,0x23,0x08
-.Lprologue_xop:
-
- vzeroupper
- movq 0(%rdi),%rax
- movq 8(%rdi),%rbx
- movq 16(%rdi),%rcx
- movq 24(%rdi),%rdx
- movq 32(%rdi),%r8
- movq 40(%rdi),%r9
- movq 48(%rdi),%r10
- movq 56(%rdi),%r11
- jmp .Lloop_xop
-.align 16
-.Lloop_xop:
- vmovdqa K512+1280(%rip),%xmm11
- vmovdqu 0(%rsi),%xmm0
- leaq K512+128(%rip),%rbp
- vmovdqu 16(%rsi),%xmm1
- vmovdqu 32(%rsi),%xmm2
- vpshufb %xmm11,%xmm0,%xmm0
- vmovdqu 48(%rsi),%xmm3
- vpshufb %xmm11,%xmm1,%xmm1
- vmovdqu 64(%rsi),%xmm4
- vpshufb %xmm11,%xmm2,%xmm2
- vmovdqu 80(%rsi),%xmm5
- vpshufb %xmm11,%xmm3,%xmm3
- vmovdqu 96(%rsi),%xmm6
- vpshufb %xmm11,%xmm4,%xmm4
- vmovdqu 112(%rsi),%xmm7
- vpshufb %xmm11,%xmm5,%xmm5
- vpaddq -128(%rbp),%xmm0,%xmm8
- vpshufb %xmm11,%xmm6,%xmm6
- vpaddq -96(%rbp),%xmm1,%xmm9
- vpshufb %xmm11,%xmm7,%xmm7
- vpaddq -64(%rbp),%xmm2,%xmm10
- vpaddq -32(%rbp),%xmm3,%xmm11
- vmovdqa %xmm8,0(%rsp)
- vpaddq 0(%rbp),%xmm4,%xmm8
- vmovdqa %xmm9,16(%rsp)
- vpaddq 32(%rbp),%xmm5,%xmm9
- vmovdqa %xmm10,32(%rsp)
- vpaddq 64(%rbp),%xmm6,%xmm10
- vmovdqa %xmm11,48(%rsp)
- vpaddq 96(%rbp),%xmm7,%xmm11
- vmovdqa %xmm8,64(%rsp)
- movq %rax,%r14
- vmovdqa %xmm9,80(%rsp)
- movq %rbx,%rdi
- vmovdqa %xmm10,96(%rsp)
- xorq %rcx,%rdi
- vmovdqa %xmm11,112(%rsp)
- movq %r8,%r13
- jmp .Lxop_00_47
-
-.align 16
-.Lxop_00_47:
- addq $256,%rbp
- vpalignr $8,%xmm0,%xmm1,%xmm8
- rorq $23,%r13
- movq %r14,%rax
- vpalignr $8,%xmm4,%xmm5,%xmm11
- movq %r9,%r12
- rorq $5,%r14
-.byte 143,72,120,195,200,56
- xorq %r8,%r13
- xorq %r10,%r12
- vpsrlq $7,%xmm8,%xmm8
- rorq $4,%r13
- xorq %rax,%r14
- vpaddq %xmm11,%xmm0,%xmm0
- andq %r8,%r12
- xorq %r8,%r13
- addq 0(%rsp),%r11
- movq %rax,%r15
-.byte 143,72,120,195,209,7
- xorq %r10,%r12
- rorq $6,%r14
- vpxor %xmm9,%xmm8,%xmm8
- xorq %rbx,%r15
- addq %r12,%r11
- rorq $14,%r13
- andq %r15,%rdi
-.byte 143,104,120,195,223,3
- xorq %rax,%r14
- addq %r13,%r11
- vpxor %xmm10,%xmm8,%xmm8
- xorq %rbx,%rdi
- rorq $28,%r14
- vpsrlq $6,%xmm7,%xmm10
- addq %r11,%rdx
- addq %rdi,%r11
- vpaddq %xmm8,%xmm0,%xmm0
- movq %rdx,%r13
- addq %r11,%r14
-.byte 143,72,120,195,203,42
- rorq $23,%r13
- movq %r14,%r11
- vpxor %xmm10,%xmm11,%xmm11
- movq %r8,%r12
- rorq $5,%r14
- xorq %rdx,%r13
- xorq %r9,%r12
- vpxor %xmm9,%xmm11,%xmm11
- rorq $4,%r13
- xorq %r11,%r14
- andq %rdx,%r12
- xorq %rdx,%r13
- vpaddq %xmm11,%xmm0,%xmm0
- addq 8(%rsp),%r10
- movq %r11,%rdi
- xorq %r9,%r12
- rorq $6,%r14
- vpaddq -128(%rbp),%xmm0,%xmm10
- xorq %rax,%rdi
- addq %r12,%r10
- rorq $14,%r13
- andq %rdi,%r15
- xorq %r11,%r14
- addq %r13,%r10
- xorq %rax,%r15
- rorq $28,%r14
- addq %r10,%rcx
- addq %r15,%r10
- movq %rcx,%r13
- addq %r10,%r14
- vmovdqa %xmm10,0(%rsp)
- vpalignr $8,%xmm1,%xmm2,%xmm8
- rorq $23,%r13
- movq %r14,%r10
- vpalignr $8,%xmm5,%xmm6,%xmm11
- movq %rdx,%r12
- rorq $5,%r14
-.byte 143,72,120,195,200,56
- xorq %rcx,%r13
- xorq %r8,%r12
- vpsrlq $7,%xmm8,%xmm8
- rorq $4,%r13
- xorq %r10,%r14
- vpaddq %xmm11,%xmm1,%xmm1
- andq %rcx,%r12
- xorq %rcx,%r13
- addq 16(%rsp),%r9
- movq %r10,%r15
-.byte 143,72,120,195,209,7
- xorq %r8,%r12
- rorq $6,%r14
- vpxor %xmm9,%xmm8,%xmm8
- xorq %r11,%r15
- addq %r12,%r9
- rorq $14,%r13
- andq %r15,%rdi
-.byte 143,104,120,195,216,3
- xorq %r10,%r14
- addq %r13,%r9
- vpxor %xmm10,%xmm8,%xmm8
- xorq %r11,%rdi
- rorq $28,%r14
- vpsrlq $6,%xmm0,%xmm10
- addq %r9,%rbx
- addq %rdi,%r9
- vpaddq %xmm8,%xmm1,%xmm1
- movq %rbx,%r13
- addq %r9,%r14
-.byte 143,72,120,195,203,42
- rorq $23,%r13
- movq %r14,%r9
- vpxor %xmm10,%xmm11,%xmm11
- movq %rcx,%r12
- rorq $5,%r14
- xorq %rbx,%r13
- xorq %rdx,%r12
- vpxor %xmm9,%xmm11,%xmm11
- rorq $4,%r13
- xorq %r9,%r14
- andq %rbx,%r12
- xorq %rbx,%r13
- vpaddq %xmm11,%xmm1,%xmm1
- addq 24(%rsp),%r8
- movq %r9,%rdi
- xorq %rdx,%r12
- rorq $6,%r14
- vpaddq -96(%rbp),%xmm1,%xmm10
- xorq %r10,%rdi
- addq %r12,%r8
- rorq $14,%r13
- andq %rdi,%r15
- xorq %r9,%r14
- addq %r13,%r8
- xorq %r10,%r15
- rorq $28,%r14
- addq %r8,%rax
- addq %r15,%r8
- movq %rax,%r13
- addq %r8,%r14
- vmovdqa %xmm10,16(%rsp)
- vpalignr $8,%xmm2,%xmm3,%xmm8
- rorq $23,%r13
- movq %r14,%r8
- vpalignr $8,%xmm6,%xmm7,%xmm11
- movq %rbx,%r12
- rorq $5,%r14
-.byte 143,72,120,195,200,56
- xorq %rax,%r13
- xorq %rcx,%r12
- vpsrlq $7,%xmm8,%xmm8
- rorq $4,%r13
- xorq %r8,%r14
- vpaddq %xmm11,%xmm2,%xmm2
- andq %rax,%r12
- xorq %rax,%r13
- addq 32(%rsp),%rdx
- movq %r8,%r15
-.byte 143,72,120,195,209,7
- xorq %rcx,%r12
- rorq $6,%r14
- vpxor %xmm9,%xmm8,%xmm8
- xorq %r9,%r15
- addq %r12,%rdx
- rorq $14,%r13
- andq %r15,%rdi
-.byte 143,104,120,195,217,3
- xorq %r8,%r14
- addq %r13,%rdx
- vpxor %xmm10,%xmm8,%xmm8
- xorq %r9,%rdi
- rorq $28,%r14
- vpsrlq $6,%xmm1,%xmm10
- addq %rdx,%r11
- addq %rdi,%rdx
- vpaddq %xmm8,%xmm2,%xmm2
- movq %r11,%r13
- addq %rdx,%r14
-.byte 143,72,120,195,203,42
- rorq $23,%r13
- movq %r14,%rdx
- vpxor %xmm10,%xmm11,%xmm11
- movq %rax,%r12
- rorq $5,%r14
- xorq %r11,%r13
- xorq %rbx,%r12
- vpxor %xmm9,%xmm11,%xmm11
- rorq $4,%r13
- xorq %rdx,%r14
- andq %r11,%r12
- xorq %r11,%r13
- vpaddq %xmm11,%xmm2,%xmm2
- addq 40(%rsp),%rcx
- movq %rdx,%rdi
- xorq %rbx,%r12
- rorq $6,%r14
- vpaddq -64(%rbp),%xmm2,%xmm10
- xorq %r8,%rdi
- addq %r12,%rcx
- rorq $14,%r13
- andq %rdi,%r15
- xorq %rdx,%r14
- addq %r13,%rcx
- xorq %r8,%r15
- rorq $28,%r14
- addq %rcx,%r10
- addq %r15,%rcx
- movq %r10,%r13
- addq %rcx,%r14
- vmovdqa %xmm10,32(%rsp)
- vpalignr $8,%xmm3,%xmm4,%xmm8
- rorq $23,%r13
- movq %r14,%rcx
- vpalignr $8,%xmm7,%xmm0,%xmm11
- movq %r11,%r12
- rorq $5,%r14
-.byte 143,72,120,195,200,56
- xorq %r10,%r13
- xorq %rax,%r12
- vpsrlq $7,%xmm8,%xmm8
- rorq $4,%r13
- xorq %rcx,%r14
- vpaddq %xmm11,%xmm3,%xmm3
- andq %r10,%r12
- xorq %r10,%r13
- addq 48(%rsp),%rbx
- movq %rcx,%r15
-.byte 143,72,120,195,209,7
- xorq %rax,%r12
- rorq $6,%r14
- vpxor %xmm9,%xmm8,%xmm8
- xorq %rdx,%r15
- addq %r12,%rbx
- rorq $14,%r13
- andq %r15,%rdi
-.byte 143,104,120,195,218,3
- xorq %rcx,%r14
- addq %r13,%rbx
- vpxor %xmm10,%xmm8,%xmm8
- xorq %rdx,%rdi
- rorq $28,%r14
- vpsrlq $6,%xmm2,%xmm10
- addq %rbx,%r9
- addq %rdi,%rbx
- vpaddq %xmm8,%xmm3,%xmm3
- movq %r9,%r13
- addq %rbx,%r14
-.byte 143,72,120,195,203,42
- rorq $23,%r13
- movq %r14,%rbx
- vpxor %xmm10,%xmm11,%xmm11
- movq %r10,%r12
- rorq $5,%r14
- xorq %r9,%r13
- xorq %r11,%r12
- vpxor %xmm9,%xmm11,%xmm11
- rorq $4,%r13
- xorq %rbx,%r14
- andq %r9,%r12
- xorq %r9,%r13
- vpaddq %xmm11,%xmm3,%xmm3
- addq 56(%rsp),%rax
- movq %rbx,%rdi
- xorq %r11,%r12
- rorq $6,%r14
- vpaddq -32(%rbp),%xmm3,%xmm10
- xorq %rcx,%rdi
- addq %r12,%rax
- rorq $14,%r13
- andq %rdi,%r15
- xorq %rbx,%r14
- addq %r13,%rax
- xorq %rcx,%r15
- rorq $28,%r14
- addq %rax,%r8
- addq %r15,%rax
- movq %r8,%r13
- addq %rax,%r14
- vmovdqa %xmm10,48(%rsp)
- vpalignr $8,%xmm4,%xmm5,%xmm8
- rorq $23,%r13
- movq %r14,%rax
- vpalignr $8,%xmm0,%xmm1,%xmm11
- movq %r9,%r12
- rorq $5,%r14
-.byte 143,72,120,195,200,56
- xorq %r8,%r13
- xorq %r10,%r12
- vpsrlq $7,%xmm8,%xmm8
- rorq $4,%r13
- xorq %rax,%r14
- vpaddq %xmm11,%xmm4,%xmm4
- andq %r8,%r12
- xorq %r8,%r13
- addq 64(%rsp),%r11
- movq %rax,%r15
-.byte 143,72,120,195,209,7
- xorq %r10,%r12
- rorq $6,%r14
- vpxor %xmm9,%xmm8,%xmm8
- xorq %rbx,%r15
- addq %r12,%r11
- rorq $14,%r13
- andq %r15,%rdi
-.byte 143,104,120,195,219,3
- xorq %rax,%r14
- addq %r13,%r11
- vpxor %xmm10,%xmm8,%xmm8
- xorq %rbx,%rdi
- rorq $28,%r14
- vpsrlq $6,%xmm3,%xmm10
- addq %r11,%rdx
- addq %rdi,%r11
- vpaddq %xmm8,%xmm4,%xmm4
- movq %rdx,%r13
- addq %r11,%r14
-.byte 143,72,120,195,203,42
- rorq $23,%r13
- movq %r14,%r11
- vpxor %xmm10,%xmm11,%xmm11
- movq %r8,%r12
- rorq $5,%r14
- xorq %rdx,%r13
- xorq %r9,%r12
- vpxor %xmm9,%xmm11,%xmm11
- rorq $4,%r13
- xorq %r11,%r14
- andq %rdx,%r12
- xorq %rdx,%r13
- vpaddq %xmm11,%xmm4,%xmm4
- addq 72(%rsp),%r10
- movq %r11,%rdi
- xorq %r9,%r12
- rorq $6,%r14
- vpaddq 0(%rbp),%xmm4,%xmm10
- xorq %rax,%rdi
- addq %r12,%r10
- rorq $14,%r13
- andq %rdi,%r15
- xorq %r11,%r14
- addq %r13,%r10
- xorq %rax,%r15
- rorq $28,%r14
- addq %r10,%rcx
- addq %r15,%r10
- movq %rcx,%r13
- addq %r10,%r14
- vmovdqa %xmm10,64(%rsp)
- vpalignr $8,%xmm5,%xmm6,%xmm8
- rorq $23,%r13
- movq %r14,%r10
- vpalignr $8,%xmm1,%xmm2,%xmm11
- movq %rdx,%r12
- rorq $5,%r14
-.byte 143,72,120,195,200,56
- xorq %rcx,%r13
- xorq %r8,%r12
- vpsrlq $7,%xmm8,%xmm8
- rorq $4,%r13
- xorq %r10,%r14
- vpaddq %xmm11,%xmm5,%xmm5
- andq %rcx,%r12
- xorq %rcx,%r13
- addq 80(%rsp),%r9
- movq %r10,%r15
-.byte 143,72,120,195,209,7
- xorq %r8,%r12
- rorq $6,%r14
- vpxor %xmm9,%xmm8,%xmm8
- xorq %r11,%r15
- addq %r12,%r9
- rorq $14,%r13
- andq %r15,%rdi
-.byte 143,104,120,195,220,3
- xorq %r10,%r14
- addq %r13,%r9
- vpxor %xmm10,%xmm8,%xmm8
- xorq %r11,%rdi
- rorq $28,%r14
- vpsrlq $6,%xmm4,%xmm10
- addq %r9,%rbx
- addq %rdi,%r9
- vpaddq %xmm8,%xmm5,%xmm5
- movq %rbx,%r13
- addq %r9,%r14
-.byte 143,72,120,195,203,42
- rorq $23,%r13
- movq %r14,%r9
- vpxor %xmm10,%xmm11,%xmm11
- movq %rcx,%r12
- rorq $5,%r14
- xorq %rbx,%r13
- xorq %rdx,%r12
- vpxor %xmm9,%xmm11,%xmm11
- rorq $4,%r13
- xorq %r9,%r14
- andq %rbx,%r12
- xorq %rbx,%r13
- vpaddq %xmm11,%xmm5,%xmm5
- addq 88(%rsp),%r8
- movq %r9,%rdi
- xorq %rdx,%r12
- rorq $6,%r14
- vpaddq 32(%rbp),%xmm5,%xmm10
- xorq %r10,%rdi
- addq %r12,%r8
- rorq $14,%r13
- andq %rdi,%r15
- xorq %r9,%r14
- addq %r13,%r8
- xorq %r10,%r15
- rorq $28,%r14
- addq %r8,%rax
- addq %r15,%r8
- movq %rax,%r13
- addq %r8,%r14
- vmovdqa %xmm10,80(%rsp)
- vpalignr $8,%xmm6,%xmm7,%xmm8
- rorq $23,%r13
- movq %r14,%r8
- vpalignr $8,%xmm2,%xmm3,%xmm11
- movq %rbx,%r12
- rorq $5,%r14
-.byte 143,72,120,195,200,56
- xorq %rax,%r13
- xorq %rcx,%r12
- vpsrlq $7,%xmm8,%xmm8
- rorq $4,%r13
- xorq %r8,%r14
- vpaddq %xmm11,%xmm6,%xmm6
- andq %rax,%r12
- xorq %rax,%r13
- addq 96(%rsp),%rdx
- movq %r8,%r15
-.byte 143,72,120,195,209,7
- xorq %rcx,%r12
- rorq $6,%r14
- vpxor %xmm9,%xmm8,%xmm8
- xorq %r9,%r15
- addq %r12,%rdx
- rorq $14,%r13
- andq %r15,%rdi
-.byte 143,104,120,195,221,3
- xorq %r8,%r14
- addq %r13,%rdx
- vpxor %xmm10,%xmm8,%xmm8
- xorq %r9,%rdi
- rorq $28,%r14
- vpsrlq $6,%xmm5,%xmm10
- addq %rdx,%r11
- addq %rdi,%rdx
- vpaddq %xmm8,%xmm6,%xmm6
- movq %r11,%r13
- addq %rdx,%r14
-.byte 143,72,120,195,203,42
- rorq $23,%r13
- movq %r14,%rdx
- vpxor %xmm10,%xmm11,%xmm11
- movq %rax,%r12
- rorq $5,%r14
- xorq %r11,%r13
- xorq %rbx,%r12
- vpxor %xmm9,%xmm11,%xmm11
- rorq $4,%r13
- xorq %rdx,%r14
- andq %r11,%r12
- xorq %r11,%r13
- vpaddq %xmm11,%xmm6,%xmm6
- addq 104(%rsp),%rcx
- movq %rdx,%rdi
- xorq %rbx,%r12
- rorq $6,%r14
- vpaddq 64(%rbp),%xmm6,%xmm10
- xorq %r8,%rdi
- addq %r12,%rcx
- rorq $14,%r13
- andq %rdi,%r15
- xorq %rdx,%r14
- addq %r13,%rcx
- xorq %r8,%r15
- rorq $28,%r14
- addq %rcx,%r10
- addq %r15,%rcx
- movq %r10,%r13
- addq %rcx,%r14
- vmovdqa %xmm10,96(%rsp)
- vpalignr $8,%xmm7,%xmm0,%xmm8
- rorq $23,%r13
- movq %r14,%rcx
- vpalignr $8,%xmm3,%xmm4,%xmm11
- movq %r11,%r12
- rorq $5,%r14
-.byte 143,72,120,195,200,56
- xorq %r10,%r13
- xorq %rax,%r12
- vpsrlq $7,%xmm8,%xmm8
- rorq $4,%r13
- xorq %rcx,%r14
- vpaddq %xmm11,%xmm7,%xmm7
- andq %r10,%r12
- xorq %r10,%r13
- addq 112(%rsp),%rbx
- movq %rcx,%r15
-.byte 143,72,120,195,209,7
- xorq %rax,%r12
- rorq $6,%r14
- vpxor %xmm9,%xmm8,%xmm8
- xorq %rdx,%r15
- addq %r12,%rbx
- rorq $14,%r13
- andq %r15,%rdi
-.byte 143,104,120,195,222,3
- xorq %rcx,%r14
- addq %r13,%rbx
- vpxor %xmm10,%xmm8,%xmm8
- xorq %rdx,%rdi
- rorq $28,%r14
- vpsrlq $6,%xmm6,%xmm10
- addq %rbx,%r9
- addq %rdi,%rbx
- vpaddq %xmm8,%xmm7,%xmm7
- movq %r9,%r13
- addq %rbx,%r14
-.byte 143,72,120,195,203,42
- rorq $23,%r13
- movq %r14,%rbx
- vpxor %xmm10,%xmm11,%xmm11
- movq %r10,%r12
- rorq $5,%r14
- xorq %r9,%r13
- xorq %r11,%r12
- vpxor %xmm9,%xmm11,%xmm11
- rorq $4,%r13
- xorq %rbx,%r14
- andq %r9,%r12
- xorq %r9,%r13
- vpaddq %xmm11,%xmm7,%xmm7
- addq 120(%rsp),%rax
- movq %rbx,%rdi
- xorq %r11,%r12
- rorq $6,%r14
- vpaddq 96(%rbp),%xmm7,%xmm10
- xorq %rcx,%rdi
- addq %r12,%rax
- rorq $14,%r13
- andq %rdi,%r15
- xorq %rbx,%r14
- addq %r13,%rax
- xorq %rcx,%r15
- rorq $28,%r14
- addq %rax,%r8
- addq %r15,%rax
- movq %r8,%r13
- addq %rax,%r14
- vmovdqa %xmm10,112(%rsp)
- cmpb $0,135(%rbp)
- jne .Lxop_00_47
- rorq $23,%r13
- movq %r14,%rax
- movq %r9,%r12
- rorq $5,%r14
- xorq %r8,%r13
- xorq %r10,%r12
- rorq $4,%r13
- xorq %rax,%r14
- andq %r8,%r12
- xorq %r8,%r13
- addq 0(%rsp),%r11
- movq %rax,%r15
- xorq %r10,%r12
- rorq $6,%r14
- xorq %rbx,%r15
- addq %r12,%r11
- rorq $14,%r13
- andq %r15,%rdi
- xorq %rax,%r14
- addq %r13,%r11
- xorq %rbx,%rdi
- rorq $28,%r14
- addq %r11,%rdx
- addq %rdi,%r11
- movq %rdx,%r13
- addq %r11,%r14
- rorq $23,%r13
- movq %r14,%r11
- movq %r8,%r12
- rorq $5,%r14
- xorq %rdx,%r13
- xorq %r9,%r12
- rorq $4,%r13
- xorq %r11,%r14
- andq %rdx,%r12
- xorq %rdx,%r13
- addq 8(%rsp),%r10
- movq %r11,%rdi
- xorq %r9,%r12
- rorq $6,%r14
- xorq %rax,%rdi
- addq %r12,%r10
- rorq $14,%r13
- andq %rdi,%r15
- xorq %r11,%r14
- addq %r13,%r10
- xorq %rax,%r15
- rorq $28,%r14
- addq %r10,%rcx
- addq %r15,%r10
- movq %rcx,%r13
- addq %r10,%r14
- rorq $23,%r13
- movq %r14,%r10
- movq %rdx,%r12
- rorq $5,%r14
- xorq %rcx,%r13
- xorq %r8,%r12
- rorq $4,%r13
- xorq %r10,%r14
- andq %rcx,%r12
- xorq %rcx,%r13
- addq 16(%rsp),%r9
- movq %r10,%r15
- xorq %r8,%r12
- rorq $6,%r14
- xorq %r11,%r15
- addq %r12,%r9
- rorq $14,%r13
- andq %r15,%rdi
- xorq %r10,%r14
- addq %r13,%r9
- xorq %r11,%rdi
- rorq $28,%r14
- addq %r9,%rbx
- addq %rdi,%r9
- movq %rbx,%r13
- addq %r9,%r14
- rorq $23,%r13
- movq %r14,%r9
- movq %rcx,%r12
- rorq $5,%r14
- xorq %rbx,%r13
- xorq %rdx,%r12
- rorq $4,%r13
- xorq %r9,%r14
- andq %rbx,%r12
- xorq %rbx,%r13
- addq 24(%rsp),%r8
- movq %r9,%rdi
- xorq %rdx,%r12
- rorq $6,%r14
- xorq %r10,%rdi
- addq %r12,%r8
- rorq $14,%r13
- andq %rdi,%r15
- xorq %r9,%r14
- addq %r13,%r8
- xorq %r10,%r15
- rorq $28,%r14
- addq %r8,%rax
- addq %r15,%r8
- movq %rax,%r13
- addq %r8,%r14
- rorq $23,%r13
- movq %r14,%r8
- movq %rbx,%r12
- rorq $5,%r14
- xorq %rax,%r13
- xorq %rcx,%r12
- rorq $4,%r13
- xorq %r8,%r14
- andq %rax,%r12
- xorq %rax,%r13
- addq 32(%rsp),%rdx
- movq %r8,%r15
- xorq %rcx,%r12
- rorq $6,%r14
- xorq %r9,%r15
- addq %r12,%rdx
- rorq $14,%r13
- andq %r15,%rdi
- xorq %r8,%r14
- addq %r13,%rdx
- xorq %r9,%rdi
- rorq $28,%r14
- addq %rdx,%r11
- addq %rdi,%rdx
- movq %r11,%r13
- addq %rdx,%r14
- rorq $23,%r13
- movq %r14,%rdx
- movq %rax,%r12
- rorq $5,%r14
- xorq %r11,%r13
- xorq %rbx,%r12
- rorq $4,%r13
- xorq %rdx,%r14
- andq %r11,%r12
- xorq %r11,%r13
- addq 40(%rsp),%rcx
- movq %rdx,%rdi
- xorq %rbx,%r12
- rorq $6,%r14
- xorq %r8,%rdi
- addq %r12,%rcx
- rorq $14,%r13
- andq %rdi,%r15
- xorq %rdx,%r14
- addq %r13,%rcx
- xorq %r8,%r15
- rorq $28,%r14
- addq %rcx,%r10
- addq %r15,%rcx
- movq %r10,%r13
- addq %rcx,%r14
- rorq $23,%r13
- movq %r14,%rcx
- movq %r11,%r12
- rorq $5,%r14
- xorq %r10,%r13
- xorq %rax,%r12
- rorq $4,%r13
- xorq %rcx,%r14
- andq %r10,%r12
- xorq %r10,%r13
- addq 48(%rsp),%rbx
- movq %rcx,%r15
- xorq %rax,%r12
- rorq $6,%r14
- xorq %rdx,%r15
- addq %r12,%rbx
- rorq $14,%r13
- andq %r15,%rdi
- xorq %rcx,%r14
- addq %r13,%rbx
- xorq %rdx,%rdi
- rorq $28,%r14
- addq %rbx,%r9
- addq %rdi,%rbx
- movq %r9,%r13
- addq %rbx,%r14
- rorq $23,%r13
- movq %r14,%rbx
- movq %r10,%r12
- rorq $5,%r14
- xorq %r9,%r13
- xorq %r11,%r12
- rorq $4,%r13
- xorq %rbx,%r14
- andq %r9,%r12
- xorq %r9,%r13
- addq 56(%rsp),%rax
- movq %rbx,%rdi
- xorq %r11,%r12
- rorq $6,%r14
- xorq %rcx,%rdi
- addq %r12,%rax
- rorq $14,%r13
- andq %rdi,%r15
- xorq %rbx,%r14
- addq %r13,%rax
- xorq %rcx,%r15
- rorq $28,%r14
- addq %rax,%r8
- addq %r15,%rax
- movq %r8,%r13
- addq %rax,%r14
- rorq $23,%r13
- movq %r14,%rax
- movq %r9,%r12
- rorq $5,%r14
- xorq %r8,%r13
- xorq %r10,%r12
- rorq $4,%r13
- xorq %rax,%r14
- andq %r8,%r12
- xorq %r8,%r13
- addq 64(%rsp),%r11
- movq %rax,%r15
- xorq %r10,%r12
- rorq $6,%r14
- xorq %rbx,%r15
- addq %r12,%r11
- rorq $14,%r13
- andq %r15,%rdi
- xorq %rax,%r14
- addq %r13,%r11
- xorq %rbx,%rdi
- rorq $28,%r14
- addq %r11,%rdx
- addq %rdi,%r11
- movq %rdx,%r13
- addq %r11,%r14
- rorq $23,%r13
- movq %r14,%r11
- movq %r8,%r12
- rorq $5,%r14
- xorq %rdx,%r13
- xorq %r9,%r12
- rorq $4,%r13
- xorq %r11,%r14
- andq %rdx,%r12
- xorq %rdx,%r13
- addq 72(%rsp),%r10
- movq %r11,%rdi
- xorq %r9,%r12
- rorq $6,%r14
- xorq %rax,%rdi
- addq %r12,%r10
- rorq $14,%r13
- andq %rdi,%r15
- xorq %r11,%r14
- addq %r13,%r10
- xorq %rax,%r15
- rorq $28,%r14
- addq %r10,%rcx
- addq %r15,%r10
- movq %rcx,%r13
- addq %r10,%r14
- rorq $23,%r13
- movq %r14,%r10
- movq %rdx,%r12
- rorq $5,%r14
- xorq %rcx,%r13
- xorq %r8,%r12
- rorq $4,%r13
- xorq %r10,%r14
- andq %rcx,%r12
- xorq %rcx,%r13
- addq 80(%rsp),%r9
- movq %r10,%r15
- xorq %r8,%r12
- rorq $6,%r14
- xorq %r11,%r15
- addq %r12,%r9
- rorq $14,%r13
- andq %r15,%rdi
- xorq %r10,%r14
- addq %r13,%r9
- xorq %r11,%rdi
- rorq $28,%r14
- addq %r9,%rbx
- addq %rdi,%r9
- movq %rbx,%r13
- addq %r9,%r14
- rorq $23,%r13
- movq %r14,%r9
- movq %rcx,%r12
- rorq $5,%r14
- xorq %rbx,%r13
- xorq %rdx,%r12
- rorq $4,%r13
- xorq %r9,%r14
- andq %rbx,%r12
- xorq %rbx,%r13
- addq 88(%rsp),%r8
- movq %r9,%rdi
- xorq %rdx,%r12
- rorq $6,%r14
- xorq %r10,%rdi
- addq %r12,%r8
- rorq $14,%r13
- andq %rdi,%r15
- xorq %r9,%r14
- addq %r13,%r8
- xorq %r10,%r15
- rorq $28,%r14
- addq %r8,%rax
- addq %r15,%r8
- movq %rax,%r13
- addq %r8,%r14
- rorq $23,%r13
- movq %r14,%r8
- movq %rbx,%r12
- rorq $5,%r14
- xorq %rax,%r13
- xorq %rcx,%r12
- rorq $4,%r13
- xorq %r8,%r14
- andq %rax,%r12
- xorq %rax,%r13
- addq 96(%rsp),%rdx
- movq %r8,%r15
- xorq %rcx,%r12
- rorq $6,%r14
- xorq %r9,%r15
- addq %r12,%rdx
- rorq $14,%r13
- andq %r15,%rdi
- xorq %r8,%r14
- addq %r13,%rdx
- xorq %r9,%rdi
- rorq $28,%r14
- addq %rdx,%r11
- addq %rdi,%rdx
- movq %r11,%r13
- addq %rdx,%r14
- rorq $23,%r13
- movq %r14,%rdx
- movq %rax,%r12
- rorq $5,%r14
- xorq %r11,%r13
- xorq %rbx,%r12
- rorq $4,%r13
- xorq %rdx,%r14
- andq %r11,%r12
- xorq %r11,%r13
- addq 104(%rsp),%rcx
- movq %rdx,%rdi
- xorq %rbx,%r12
- rorq $6,%r14
- xorq %r8,%rdi
- addq %r12,%rcx
- rorq $14,%r13
- andq %rdi,%r15
- xorq %rdx,%r14
- addq %r13,%rcx
- xorq %r8,%r15
- rorq $28,%r14
- addq %rcx,%r10
- addq %r15,%rcx
- movq %r10,%r13
- addq %rcx,%r14
- rorq $23,%r13
- movq %r14,%rcx
- movq %r11,%r12
- rorq $5,%r14
- xorq %r10,%r13
- xorq %rax,%r12
- rorq $4,%r13
- xorq %rcx,%r14
- andq %r10,%r12
- xorq %r10,%r13
- addq 112(%rsp),%rbx
- movq %rcx,%r15
- xorq %rax,%r12
- rorq $6,%r14
- xorq %rdx,%r15
- addq %r12,%rbx
- rorq $14,%r13
- andq %r15,%rdi
- xorq %rcx,%r14
- addq %r13,%rbx
- xorq %rdx,%rdi
- rorq $28,%r14
- addq %rbx,%r9
- addq %rdi,%rbx
- movq %r9,%r13
- addq %rbx,%r14
- rorq $23,%r13
- movq %r14,%rbx
- movq %r10,%r12
- rorq $5,%r14
- xorq %r9,%r13
- xorq %r11,%r12
- rorq $4,%r13
- xorq %rbx,%r14
- andq %r9,%r12
- xorq %r9,%r13
- addq 120(%rsp),%rax
- movq %rbx,%rdi
- xorq %r11,%r12
- rorq $6,%r14
- xorq %rcx,%rdi
- addq %r12,%rax
- rorq $14,%r13
- andq %rdi,%r15
- xorq %rbx,%r14
- addq %r13,%rax
- xorq %rcx,%r15
- rorq $28,%r14
- addq %rax,%r8
- addq %r15,%rax
- movq %r8,%r13
- addq %rax,%r14
- movq 128+0(%rsp),%rdi
- movq %r14,%rax
-
- addq 0(%rdi),%rax
- leaq 128(%rsi),%rsi
- addq 8(%rdi),%rbx
- addq 16(%rdi),%rcx
- addq 24(%rdi),%rdx
- addq 32(%rdi),%r8
- addq 40(%rdi),%r9
- addq 48(%rdi),%r10
- addq 56(%rdi),%r11
-
- cmpq 128+16(%rsp),%rsi
-
- movq %rax,0(%rdi)
- movq %rbx,8(%rdi)
- movq %rcx,16(%rdi)
- movq %rdx,24(%rdi)
- movq %r8,32(%rdi)
- movq %r9,40(%rdi)
- movq %r10,48(%rdi)
- movq %r11,56(%rdi)
- jb .Lloop_xop
-
- movq 152(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- vzeroupper
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_xop:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha512_block_data_order_xop,.-sha512_block_data_order_xop
-.type sha512_block_data_order_avx,@function
-.align 64
-sha512_block_data_order_avx:
-.cfi_startproc
-.Lavx_shortcut:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- shlq $4,%rdx
- subq $160,%rsp
- leaq (%rsi,%rdx,8),%rdx
- andq $-64,%rsp
- movq %rdi,128+0(%rsp)
- movq %rsi,128+8(%rsp)
- movq %rdx,128+16(%rsp)
- movq %rax,152(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0x98,0x01,0x06,0x23,0x08
-.Lprologue_avx:
-
- vzeroupper
- movq 0(%rdi),%rax
- movq 8(%rdi),%rbx
- movq 16(%rdi),%rcx
- movq 24(%rdi),%rdx
- movq 32(%rdi),%r8
- movq 40(%rdi),%r9
- movq 48(%rdi),%r10
- movq 56(%rdi),%r11
- jmp .Lloop_avx
-.align 16
-.Lloop_avx:
- vmovdqa K512+1280(%rip),%xmm11
- vmovdqu 0(%rsi),%xmm0
- leaq K512+128(%rip),%rbp
- vmovdqu 16(%rsi),%xmm1
- vmovdqu 32(%rsi),%xmm2
- vpshufb %xmm11,%xmm0,%xmm0
- vmovdqu 48(%rsi),%xmm3
- vpshufb %xmm11,%xmm1,%xmm1
- vmovdqu 64(%rsi),%xmm4
- vpshufb %xmm11,%xmm2,%xmm2
- vmovdqu 80(%rsi),%xmm5
- vpshufb %xmm11,%xmm3,%xmm3
- vmovdqu 96(%rsi),%xmm6
- vpshufb %xmm11,%xmm4,%xmm4
- vmovdqu 112(%rsi),%xmm7
- vpshufb %xmm11,%xmm5,%xmm5
- vpaddq -128(%rbp),%xmm0,%xmm8
- vpshufb %xmm11,%xmm6,%xmm6
- vpaddq -96(%rbp),%xmm1,%xmm9
- vpshufb %xmm11,%xmm7,%xmm7
- vpaddq -64(%rbp),%xmm2,%xmm10
- vpaddq -32(%rbp),%xmm3,%xmm11
- vmovdqa %xmm8,0(%rsp)
- vpaddq 0(%rbp),%xmm4,%xmm8
- vmovdqa %xmm9,16(%rsp)
- vpaddq 32(%rbp),%xmm5,%xmm9
- vmovdqa %xmm10,32(%rsp)
- vpaddq 64(%rbp),%xmm6,%xmm10
- vmovdqa %xmm11,48(%rsp)
- vpaddq 96(%rbp),%xmm7,%xmm11
- vmovdqa %xmm8,64(%rsp)
- movq %rax,%r14
- vmovdqa %xmm9,80(%rsp)
- movq %rbx,%rdi
- vmovdqa %xmm10,96(%rsp)
- xorq %rcx,%rdi
- vmovdqa %xmm11,112(%rsp)
- movq %r8,%r13
- jmp .Lavx_00_47
-
-.align 16
-.Lavx_00_47:
- addq $256,%rbp
- vpalignr $8,%xmm0,%xmm1,%xmm8
- shrdq $23,%r13,%r13
- movq %r14,%rax
- vpalignr $8,%xmm4,%xmm5,%xmm11
- movq %r9,%r12
- shrdq $5,%r14,%r14
- vpsrlq $1,%xmm8,%xmm10
- xorq %r8,%r13
- xorq %r10,%r12
- vpaddq %xmm11,%xmm0,%xmm0
- shrdq $4,%r13,%r13
- xorq %rax,%r14
- vpsrlq $7,%xmm8,%xmm11
- andq %r8,%r12
- xorq %r8,%r13
- vpsllq $56,%xmm8,%xmm9
- addq 0(%rsp),%r11
- movq %rax,%r15
- vpxor %xmm10,%xmm11,%xmm8
- xorq %r10,%r12
- shrdq $6,%r14,%r14
- vpsrlq $7,%xmm10,%xmm10
- xorq %rbx,%r15
- addq %r12,%r11
- vpxor %xmm9,%xmm8,%xmm8
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- vpsllq $7,%xmm9,%xmm9
- xorq %rax,%r14
- addq %r13,%r11
- vpxor %xmm10,%xmm8,%xmm8
- xorq %rbx,%rdi
- shrdq $28,%r14,%r14
- vpsrlq $6,%xmm7,%xmm11
- addq %r11,%rdx
- addq %rdi,%r11
- vpxor %xmm9,%xmm8,%xmm8
- movq %rdx,%r13
- addq %r11,%r14
- vpsllq $3,%xmm7,%xmm10
- shrdq $23,%r13,%r13
- movq %r14,%r11
- vpaddq %xmm8,%xmm0,%xmm0
- movq %r8,%r12
- shrdq $5,%r14,%r14
- vpsrlq $19,%xmm7,%xmm9
- xorq %rdx,%r13
- xorq %r9,%r12
- vpxor %xmm10,%xmm11,%xmm11
- shrdq $4,%r13,%r13
- xorq %r11,%r14
- vpsllq $42,%xmm10,%xmm10
- andq %rdx,%r12
- xorq %rdx,%r13
- vpxor %xmm9,%xmm11,%xmm11
- addq 8(%rsp),%r10
- movq %r11,%rdi
- vpsrlq $42,%xmm9,%xmm9
- xorq %r9,%r12
- shrdq $6,%r14,%r14
- vpxor %xmm10,%xmm11,%xmm11
- xorq %rax,%rdi
- addq %r12,%r10
- vpxor %xmm9,%xmm11,%xmm11
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- vpaddq %xmm11,%xmm0,%xmm0
- xorq %r11,%r14
- addq %r13,%r10
- vpaddq -128(%rbp),%xmm0,%xmm10
- xorq %rax,%r15
- shrdq $28,%r14,%r14
- addq %r10,%rcx
- addq %r15,%r10
- movq %rcx,%r13
- addq %r10,%r14
- vmovdqa %xmm10,0(%rsp)
- vpalignr $8,%xmm1,%xmm2,%xmm8
- shrdq $23,%r13,%r13
- movq %r14,%r10
- vpalignr $8,%xmm5,%xmm6,%xmm11
- movq %rdx,%r12
- shrdq $5,%r14,%r14
- vpsrlq $1,%xmm8,%xmm10
- xorq %rcx,%r13
- xorq %r8,%r12
- vpaddq %xmm11,%xmm1,%xmm1
- shrdq $4,%r13,%r13
- xorq %r10,%r14
- vpsrlq $7,%xmm8,%xmm11
- andq %rcx,%r12
- xorq %rcx,%r13
- vpsllq $56,%xmm8,%xmm9
- addq 16(%rsp),%r9
- movq %r10,%r15
- vpxor %xmm10,%xmm11,%xmm8
- xorq %r8,%r12
- shrdq $6,%r14,%r14
- vpsrlq $7,%xmm10,%xmm10
- xorq %r11,%r15
- addq %r12,%r9
- vpxor %xmm9,%xmm8,%xmm8
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- vpsllq $7,%xmm9,%xmm9
- xorq %r10,%r14
- addq %r13,%r9
- vpxor %xmm10,%xmm8,%xmm8
- xorq %r11,%rdi
- shrdq $28,%r14,%r14
- vpsrlq $6,%xmm0,%xmm11
- addq %r9,%rbx
- addq %rdi,%r9
- vpxor %xmm9,%xmm8,%xmm8
- movq %rbx,%r13
- addq %r9,%r14
- vpsllq $3,%xmm0,%xmm10
- shrdq $23,%r13,%r13
- movq %r14,%r9
- vpaddq %xmm8,%xmm1,%xmm1
- movq %rcx,%r12
- shrdq $5,%r14,%r14
- vpsrlq $19,%xmm0,%xmm9
- xorq %rbx,%r13
- xorq %rdx,%r12
- vpxor %xmm10,%xmm11,%xmm11
- shrdq $4,%r13,%r13
- xorq %r9,%r14
- vpsllq $42,%xmm10,%xmm10
- andq %rbx,%r12
- xorq %rbx,%r13
- vpxor %xmm9,%xmm11,%xmm11
- addq 24(%rsp),%r8
- movq %r9,%rdi
- vpsrlq $42,%xmm9,%xmm9
- xorq %rdx,%r12
- shrdq $6,%r14,%r14
- vpxor %xmm10,%xmm11,%xmm11
- xorq %r10,%rdi
- addq %r12,%r8
- vpxor %xmm9,%xmm11,%xmm11
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- vpaddq %xmm11,%xmm1,%xmm1
- xorq %r9,%r14
- addq %r13,%r8
- vpaddq -96(%rbp),%xmm1,%xmm10
- xorq %r10,%r15
- shrdq $28,%r14,%r14
- addq %r8,%rax
- addq %r15,%r8
- movq %rax,%r13
- addq %r8,%r14
- vmovdqa %xmm10,16(%rsp)
- vpalignr $8,%xmm2,%xmm3,%xmm8
- shrdq $23,%r13,%r13
- movq %r14,%r8
- vpalignr $8,%xmm6,%xmm7,%xmm11
- movq %rbx,%r12
- shrdq $5,%r14,%r14
- vpsrlq $1,%xmm8,%xmm10
- xorq %rax,%r13
- xorq %rcx,%r12
- vpaddq %xmm11,%xmm2,%xmm2
- shrdq $4,%r13,%r13
- xorq %r8,%r14
- vpsrlq $7,%xmm8,%xmm11
- andq %rax,%r12
- xorq %rax,%r13
- vpsllq $56,%xmm8,%xmm9
- addq 32(%rsp),%rdx
- movq %r8,%r15
- vpxor %xmm10,%xmm11,%xmm8
- xorq %rcx,%r12
- shrdq $6,%r14,%r14
- vpsrlq $7,%xmm10,%xmm10
- xorq %r9,%r15
- addq %r12,%rdx
- vpxor %xmm9,%xmm8,%xmm8
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- vpsllq $7,%xmm9,%xmm9
- xorq %r8,%r14
- addq %r13,%rdx
- vpxor %xmm10,%xmm8,%xmm8
- xorq %r9,%rdi
- shrdq $28,%r14,%r14
- vpsrlq $6,%xmm1,%xmm11
- addq %rdx,%r11
- addq %rdi,%rdx
- vpxor %xmm9,%xmm8,%xmm8
- movq %r11,%r13
- addq %rdx,%r14
- vpsllq $3,%xmm1,%xmm10
- shrdq $23,%r13,%r13
- movq %r14,%rdx
- vpaddq %xmm8,%xmm2,%xmm2
- movq %rax,%r12
- shrdq $5,%r14,%r14
- vpsrlq $19,%xmm1,%xmm9
- xorq %r11,%r13
- xorq %rbx,%r12
- vpxor %xmm10,%xmm11,%xmm11
- shrdq $4,%r13,%r13
- xorq %rdx,%r14
- vpsllq $42,%xmm10,%xmm10
- andq %r11,%r12
- xorq %r11,%r13
- vpxor %xmm9,%xmm11,%xmm11
- addq 40(%rsp),%rcx
- movq %rdx,%rdi
- vpsrlq $42,%xmm9,%xmm9
- xorq %rbx,%r12
- shrdq $6,%r14,%r14
- vpxor %xmm10,%xmm11,%xmm11
- xorq %r8,%rdi
- addq %r12,%rcx
- vpxor %xmm9,%xmm11,%xmm11
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- vpaddq %xmm11,%xmm2,%xmm2
- xorq %rdx,%r14
- addq %r13,%rcx
- vpaddq -64(%rbp),%xmm2,%xmm10
- xorq %r8,%r15
- shrdq $28,%r14,%r14
- addq %rcx,%r10
- addq %r15,%rcx
- movq %r10,%r13
- addq %rcx,%r14
- vmovdqa %xmm10,32(%rsp)
- vpalignr $8,%xmm3,%xmm4,%xmm8
- shrdq $23,%r13,%r13
- movq %r14,%rcx
- vpalignr $8,%xmm7,%xmm0,%xmm11
- movq %r11,%r12
- shrdq $5,%r14,%r14
- vpsrlq $1,%xmm8,%xmm10
- xorq %r10,%r13
- xorq %rax,%r12
- vpaddq %xmm11,%xmm3,%xmm3
- shrdq $4,%r13,%r13
- xorq %rcx,%r14
- vpsrlq $7,%xmm8,%xmm11
- andq %r10,%r12
- xorq %r10,%r13
- vpsllq $56,%xmm8,%xmm9
- addq 48(%rsp),%rbx
- movq %rcx,%r15
- vpxor %xmm10,%xmm11,%xmm8
- xorq %rax,%r12
- shrdq $6,%r14,%r14
- vpsrlq $7,%xmm10,%xmm10
- xorq %rdx,%r15
- addq %r12,%rbx
- vpxor %xmm9,%xmm8,%xmm8
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- vpsllq $7,%xmm9,%xmm9
- xorq %rcx,%r14
- addq %r13,%rbx
- vpxor %xmm10,%xmm8,%xmm8
- xorq %rdx,%rdi
- shrdq $28,%r14,%r14
- vpsrlq $6,%xmm2,%xmm11
- addq %rbx,%r9
- addq %rdi,%rbx
- vpxor %xmm9,%xmm8,%xmm8
- movq %r9,%r13
- addq %rbx,%r14
- vpsllq $3,%xmm2,%xmm10
- shrdq $23,%r13,%r13
- movq %r14,%rbx
- vpaddq %xmm8,%xmm3,%xmm3
- movq %r10,%r12
- shrdq $5,%r14,%r14
- vpsrlq $19,%xmm2,%xmm9
- xorq %r9,%r13
- xorq %r11,%r12
- vpxor %xmm10,%xmm11,%xmm11
- shrdq $4,%r13,%r13
- xorq %rbx,%r14
- vpsllq $42,%xmm10,%xmm10
- andq %r9,%r12
- xorq %r9,%r13
- vpxor %xmm9,%xmm11,%xmm11
- addq 56(%rsp),%rax
- movq %rbx,%rdi
- vpsrlq $42,%xmm9,%xmm9
- xorq %r11,%r12
- shrdq $6,%r14,%r14
- vpxor %xmm10,%xmm11,%xmm11
- xorq %rcx,%rdi
- addq %r12,%rax
- vpxor %xmm9,%xmm11,%xmm11
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- vpaddq %xmm11,%xmm3,%xmm3
- xorq %rbx,%r14
- addq %r13,%rax
- vpaddq -32(%rbp),%xmm3,%xmm10
- xorq %rcx,%r15
- shrdq $28,%r14,%r14
- addq %rax,%r8
- addq %r15,%rax
- movq %r8,%r13
- addq %rax,%r14
- vmovdqa %xmm10,48(%rsp)
- vpalignr $8,%xmm4,%xmm5,%xmm8
- shrdq $23,%r13,%r13
- movq %r14,%rax
- vpalignr $8,%xmm0,%xmm1,%xmm11
- movq %r9,%r12
- shrdq $5,%r14,%r14
- vpsrlq $1,%xmm8,%xmm10
- xorq %r8,%r13
- xorq %r10,%r12
- vpaddq %xmm11,%xmm4,%xmm4
- shrdq $4,%r13,%r13
- xorq %rax,%r14
- vpsrlq $7,%xmm8,%xmm11
- andq %r8,%r12
- xorq %r8,%r13
- vpsllq $56,%xmm8,%xmm9
- addq 64(%rsp),%r11
- movq %rax,%r15
- vpxor %xmm10,%xmm11,%xmm8
- xorq %r10,%r12
- shrdq $6,%r14,%r14
- vpsrlq $7,%xmm10,%xmm10
- xorq %rbx,%r15
- addq %r12,%r11
- vpxor %xmm9,%xmm8,%xmm8
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- vpsllq $7,%xmm9,%xmm9
- xorq %rax,%r14
- addq %r13,%r11
- vpxor %xmm10,%xmm8,%xmm8
- xorq %rbx,%rdi
- shrdq $28,%r14,%r14
- vpsrlq $6,%xmm3,%xmm11
- addq %r11,%rdx
- addq %rdi,%r11
- vpxor %xmm9,%xmm8,%xmm8
- movq %rdx,%r13
- addq %r11,%r14
- vpsllq $3,%xmm3,%xmm10
- shrdq $23,%r13,%r13
- movq %r14,%r11
- vpaddq %xmm8,%xmm4,%xmm4
- movq %r8,%r12
- shrdq $5,%r14,%r14
- vpsrlq $19,%xmm3,%xmm9
- xorq %rdx,%r13
- xorq %r9,%r12
- vpxor %xmm10,%xmm11,%xmm11
- shrdq $4,%r13,%r13
- xorq %r11,%r14
- vpsllq $42,%xmm10,%xmm10
- andq %rdx,%r12
- xorq %rdx,%r13
- vpxor %xmm9,%xmm11,%xmm11
- addq 72(%rsp),%r10
- movq %r11,%rdi
- vpsrlq $42,%xmm9,%xmm9
- xorq %r9,%r12
- shrdq $6,%r14,%r14
- vpxor %xmm10,%xmm11,%xmm11
- xorq %rax,%rdi
- addq %r12,%r10
- vpxor %xmm9,%xmm11,%xmm11
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- vpaddq %xmm11,%xmm4,%xmm4
- xorq %r11,%r14
- addq %r13,%r10
- vpaddq 0(%rbp),%xmm4,%xmm10
- xorq %rax,%r15
- shrdq $28,%r14,%r14
- addq %r10,%rcx
- addq %r15,%r10
- movq %rcx,%r13
- addq %r10,%r14
- vmovdqa %xmm10,64(%rsp)
- vpalignr $8,%xmm5,%xmm6,%xmm8
- shrdq $23,%r13,%r13
- movq %r14,%r10
- vpalignr $8,%xmm1,%xmm2,%xmm11
- movq %rdx,%r12
- shrdq $5,%r14,%r14
- vpsrlq $1,%xmm8,%xmm10
- xorq %rcx,%r13
- xorq %r8,%r12
- vpaddq %xmm11,%xmm5,%xmm5
- shrdq $4,%r13,%r13
- xorq %r10,%r14
- vpsrlq $7,%xmm8,%xmm11
- andq %rcx,%r12
- xorq %rcx,%r13
- vpsllq $56,%xmm8,%xmm9
- addq 80(%rsp),%r9
- movq %r10,%r15
- vpxor %xmm10,%xmm11,%xmm8
- xorq %r8,%r12
- shrdq $6,%r14,%r14
- vpsrlq $7,%xmm10,%xmm10
- xorq %r11,%r15
- addq %r12,%r9
- vpxor %xmm9,%xmm8,%xmm8
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- vpsllq $7,%xmm9,%xmm9
- xorq %r10,%r14
- addq %r13,%r9
- vpxor %xmm10,%xmm8,%xmm8
- xorq %r11,%rdi
- shrdq $28,%r14,%r14
- vpsrlq $6,%xmm4,%xmm11
- addq %r9,%rbx
- addq %rdi,%r9
- vpxor %xmm9,%xmm8,%xmm8
- movq %rbx,%r13
- addq %r9,%r14
- vpsllq $3,%xmm4,%xmm10
- shrdq $23,%r13,%r13
- movq %r14,%r9
- vpaddq %xmm8,%xmm5,%xmm5
- movq %rcx,%r12
- shrdq $5,%r14,%r14
- vpsrlq $19,%xmm4,%xmm9
- xorq %rbx,%r13
- xorq %rdx,%r12
- vpxor %xmm10,%xmm11,%xmm11
- shrdq $4,%r13,%r13
- xorq %r9,%r14
- vpsllq $42,%xmm10,%xmm10
- andq %rbx,%r12
- xorq %rbx,%r13
- vpxor %xmm9,%xmm11,%xmm11
- addq 88(%rsp),%r8
- movq %r9,%rdi
- vpsrlq $42,%xmm9,%xmm9
- xorq %rdx,%r12
- shrdq $6,%r14,%r14
- vpxor %xmm10,%xmm11,%xmm11
- xorq %r10,%rdi
- addq %r12,%r8
- vpxor %xmm9,%xmm11,%xmm11
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- vpaddq %xmm11,%xmm5,%xmm5
- xorq %r9,%r14
- addq %r13,%r8
- vpaddq 32(%rbp),%xmm5,%xmm10
- xorq %r10,%r15
- shrdq $28,%r14,%r14
- addq %r8,%rax
- addq %r15,%r8
- movq %rax,%r13
- addq %r8,%r14
- vmovdqa %xmm10,80(%rsp)
- vpalignr $8,%xmm6,%xmm7,%xmm8
- shrdq $23,%r13,%r13
- movq %r14,%r8
- vpalignr $8,%xmm2,%xmm3,%xmm11
- movq %rbx,%r12
- shrdq $5,%r14,%r14
- vpsrlq $1,%xmm8,%xmm10
- xorq %rax,%r13
- xorq %rcx,%r12
- vpaddq %xmm11,%xmm6,%xmm6
- shrdq $4,%r13,%r13
- xorq %r8,%r14
- vpsrlq $7,%xmm8,%xmm11
- andq %rax,%r12
- xorq %rax,%r13
- vpsllq $56,%xmm8,%xmm9
- addq 96(%rsp),%rdx
- movq %r8,%r15
- vpxor %xmm10,%xmm11,%xmm8
- xorq %rcx,%r12
- shrdq $6,%r14,%r14
- vpsrlq $7,%xmm10,%xmm10
- xorq %r9,%r15
- addq %r12,%rdx
- vpxor %xmm9,%xmm8,%xmm8
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- vpsllq $7,%xmm9,%xmm9
- xorq %r8,%r14
- addq %r13,%rdx
- vpxor %xmm10,%xmm8,%xmm8
- xorq %r9,%rdi
- shrdq $28,%r14,%r14
- vpsrlq $6,%xmm5,%xmm11
- addq %rdx,%r11
- addq %rdi,%rdx
- vpxor %xmm9,%xmm8,%xmm8
- movq %r11,%r13
- addq %rdx,%r14
- vpsllq $3,%xmm5,%xmm10
- shrdq $23,%r13,%r13
- movq %r14,%rdx
- vpaddq %xmm8,%xmm6,%xmm6
- movq %rax,%r12
- shrdq $5,%r14,%r14
- vpsrlq $19,%xmm5,%xmm9
- xorq %r11,%r13
- xorq %rbx,%r12
- vpxor %xmm10,%xmm11,%xmm11
- shrdq $4,%r13,%r13
- xorq %rdx,%r14
- vpsllq $42,%xmm10,%xmm10
- andq %r11,%r12
- xorq %r11,%r13
- vpxor %xmm9,%xmm11,%xmm11
- addq 104(%rsp),%rcx
- movq %rdx,%rdi
- vpsrlq $42,%xmm9,%xmm9
- xorq %rbx,%r12
- shrdq $6,%r14,%r14
- vpxor %xmm10,%xmm11,%xmm11
- xorq %r8,%rdi
- addq %r12,%rcx
- vpxor %xmm9,%xmm11,%xmm11
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- vpaddq %xmm11,%xmm6,%xmm6
- xorq %rdx,%r14
- addq %r13,%rcx
- vpaddq 64(%rbp),%xmm6,%xmm10
- xorq %r8,%r15
- shrdq $28,%r14,%r14
- addq %rcx,%r10
- addq %r15,%rcx
- movq %r10,%r13
- addq %rcx,%r14
- vmovdqa %xmm10,96(%rsp)
- vpalignr $8,%xmm7,%xmm0,%xmm8
- shrdq $23,%r13,%r13
- movq %r14,%rcx
- vpalignr $8,%xmm3,%xmm4,%xmm11
- movq %r11,%r12
- shrdq $5,%r14,%r14
- vpsrlq $1,%xmm8,%xmm10
- xorq %r10,%r13
- xorq %rax,%r12
- vpaddq %xmm11,%xmm7,%xmm7
- shrdq $4,%r13,%r13
- xorq %rcx,%r14
- vpsrlq $7,%xmm8,%xmm11
- andq %r10,%r12
- xorq %r10,%r13
- vpsllq $56,%xmm8,%xmm9
- addq 112(%rsp),%rbx
- movq %rcx,%r15
- vpxor %xmm10,%xmm11,%xmm8
- xorq %rax,%r12
- shrdq $6,%r14,%r14
- vpsrlq $7,%xmm10,%xmm10
- xorq %rdx,%r15
- addq %r12,%rbx
- vpxor %xmm9,%xmm8,%xmm8
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- vpsllq $7,%xmm9,%xmm9
- xorq %rcx,%r14
- addq %r13,%rbx
- vpxor %xmm10,%xmm8,%xmm8
- xorq %rdx,%rdi
- shrdq $28,%r14,%r14
- vpsrlq $6,%xmm6,%xmm11
- addq %rbx,%r9
- addq %rdi,%rbx
- vpxor %xmm9,%xmm8,%xmm8
- movq %r9,%r13
- addq %rbx,%r14
- vpsllq $3,%xmm6,%xmm10
- shrdq $23,%r13,%r13
- movq %r14,%rbx
- vpaddq %xmm8,%xmm7,%xmm7
- movq %r10,%r12
- shrdq $5,%r14,%r14
- vpsrlq $19,%xmm6,%xmm9
- xorq %r9,%r13
- xorq %r11,%r12
- vpxor %xmm10,%xmm11,%xmm11
- shrdq $4,%r13,%r13
- xorq %rbx,%r14
- vpsllq $42,%xmm10,%xmm10
- andq %r9,%r12
- xorq %r9,%r13
- vpxor %xmm9,%xmm11,%xmm11
- addq 120(%rsp),%rax
- movq %rbx,%rdi
- vpsrlq $42,%xmm9,%xmm9
- xorq %r11,%r12
- shrdq $6,%r14,%r14
- vpxor %xmm10,%xmm11,%xmm11
- xorq %rcx,%rdi
- addq %r12,%rax
- vpxor %xmm9,%xmm11,%xmm11
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- vpaddq %xmm11,%xmm7,%xmm7
- xorq %rbx,%r14
- addq %r13,%rax
- vpaddq 96(%rbp),%xmm7,%xmm10
- xorq %rcx,%r15
- shrdq $28,%r14,%r14
- addq %rax,%r8
- addq %r15,%rax
- movq %r8,%r13
- addq %rax,%r14
- vmovdqa %xmm10,112(%rsp)
- cmpb $0,135(%rbp)
- jne .Lavx_00_47
- shrdq $23,%r13,%r13
- movq %r14,%rax
- movq %r9,%r12
- shrdq $5,%r14,%r14
- xorq %r8,%r13
- xorq %r10,%r12
- shrdq $4,%r13,%r13
- xorq %rax,%r14
- andq %r8,%r12
- xorq %r8,%r13
- addq 0(%rsp),%r11
- movq %rax,%r15
- xorq %r10,%r12
- shrdq $6,%r14,%r14
- xorq %rbx,%r15
- addq %r12,%r11
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- xorq %rax,%r14
- addq %r13,%r11
- xorq %rbx,%rdi
- shrdq $28,%r14,%r14
- addq %r11,%rdx
- addq %rdi,%r11
- movq %rdx,%r13
- addq %r11,%r14
- shrdq $23,%r13,%r13
- movq %r14,%r11
- movq %r8,%r12
- shrdq $5,%r14,%r14
- xorq %rdx,%r13
- xorq %r9,%r12
- shrdq $4,%r13,%r13
- xorq %r11,%r14
- andq %rdx,%r12
- xorq %rdx,%r13
- addq 8(%rsp),%r10
- movq %r11,%rdi
- xorq %r9,%r12
- shrdq $6,%r14,%r14
- xorq %rax,%rdi
- addq %r12,%r10
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- xorq %r11,%r14
- addq %r13,%r10
- xorq %rax,%r15
- shrdq $28,%r14,%r14
- addq %r10,%rcx
- addq %r15,%r10
- movq %rcx,%r13
- addq %r10,%r14
- shrdq $23,%r13,%r13
- movq %r14,%r10
- movq %rdx,%r12
- shrdq $5,%r14,%r14
- xorq %rcx,%r13
- xorq %r8,%r12
- shrdq $4,%r13,%r13
- xorq %r10,%r14
- andq %rcx,%r12
- xorq %rcx,%r13
- addq 16(%rsp),%r9
- movq %r10,%r15
- xorq %r8,%r12
- shrdq $6,%r14,%r14
- xorq %r11,%r15
- addq %r12,%r9
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- xorq %r10,%r14
- addq %r13,%r9
- xorq %r11,%rdi
- shrdq $28,%r14,%r14
- addq %r9,%rbx
- addq %rdi,%r9
- movq %rbx,%r13
- addq %r9,%r14
- shrdq $23,%r13,%r13
- movq %r14,%r9
- movq %rcx,%r12
- shrdq $5,%r14,%r14
- xorq %rbx,%r13
- xorq %rdx,%r12
- shrdq $4,%r13,%r13
- xorq %r9,%r14
- andq %rbx,%r12
- xorq %rbx,%r13
- addq 24(%rsp),%r8
- movq %r9,%rdi
- xorq %rdx,%r12
- shrdq $6,%r14,%r14
- xorq %r10,%rdi
- addq %r12,%r8
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- xorq %r9,%r14
- addq %r13,%r8
- xorq %r10,%r15
- shrdq $28,%r14,%r14
- addq %r8,%rax
- addq %r15,%r8
- movq %rax,%r13
- addq %r8,%r14
- shrdq $23,%r13,%r13
- movq %r14,%r8
- movq %rbx,%r12
- shrdq $5,%r14,%r14
- xorq %rax,%r13
- xorq %rcx,%r12
- shrdq $4,%r13,%r13
- xorq %r8,%r14
- andq %rax,%r12
- xorq %rax,%r13
- addq 32(%rsp),%rdx
- movq %r8,%r15
- xorq %rcx,%r12
- shrdq $6,%r14,%r14
- xorq %r9,%r15
- addq %r12,%rdx
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- xorq %r8,%r14
- addq %r13,%rdx
- xorq %r9,%rdi
- shrdq $28,%r14,%r14
- addq %rdx,%r11
- addq %rdi,%rdx
- movq %r11,%r13
- addq %rdx,%r14
- shrdq $23,%r13,%r13
- movq %r14,%rdx
- movq %rax,%r12
- shrdq $5,%r14,%r14
- xorq %r11,%r13
- xorq %rbx,%r12
- shrdq $4,%r13,%r13
- xorq %rdx,%r14
- andq %r11,%r12
- xorq %r11,%r13
- addq 40(%rsp),%rcx
- movq %rdx,%rdi
- xorq %rbx,%r12
- shrdq $6,%r14,%r14
- xorq %r8,%rdi
- addq %r12,%rcx
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- xorq %rdx,%r14
- addq %r13,%rcx
- xorq %r8,%r15
- shrdq $28,%r14,%r14
- addq %rcx,%r10
- addq %r15,%rcx
- movq %r10,%r13
- addq %rcx,%r14
- shrdq $23,%r13,%r13
- movq %r14,%rcx
- movq %r11,%r12
- shrdq $5,%r14,%r14
- xorq %r10,%r13
- xorq %rax,%r12
- shrdq $4,%r13,%r13
- xorq %rcx,%r14
- andq %r10,%r12
- xorq %r10,%r13
- addq 48(%rsp),%rbx
- movq %rcx,%r15
- xorq %rax,%r12
- shrdq $6,%r14,%r14
- xorq %rdx,%r15
- addq %r12,%rbx
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- xorq %rcx,%r14
- addq %r13,%rbx
- xorq %rdx,%rdi
- shrdq $28,%r14,%r14
- addq %rbx,%r9
- addq %rdi,%rbx
- movq %r9,%r13
- addq %rbx,%r14
- shrdq $23,%r13,%r13
- movq %r14,%rbx
- movq %r10,%r12
- shrdq $5,%r14,%r14
- xorq %r9,%r13
- xorq %r11,%r12
- shrdq $4,%r13,%r13
- xorq %rbx,%r14
- andq %r9,%r12
- xorq %r9,%r13
- addq 56(%rsp),%rax
- movq %rbx,%rdi
- xorq %r11,%r12
- shrdq $6,%r14,%r14
- xorq %rcx,%rdi
- addq %r12,%rax
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- xorq %rbx,%r14
- addq %r13,%rax
- xorq %rcx,%r15
- shrdq $28,%r14,%r14
- addq %rax,%r8
- addq %r15,%rax
- movq %r8,%r13
- addq %rax,%r14
- shrdq $23,%r13,%r13
- movq %r14,%rax
- movq %r9,%r12
- shrdq $5,%r14,%r14
- xorq %r8,%r13
- xorq %r10,%r12
- shrdq $4,%r13,%r13
- xorq %rax,%r14
- andq %r8,%r12
- xorq %r8,%r13
- addq 64(%rsp),%r11
- movq %rax,%r15
- xorq %r10,%r12
- shrdq $6,%r14,%r14
- xorq %rbx,%r15
- addq %r12,%r11
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- xorq %rax,%r14
- addq %r13,%r11
- xorq %rbx,%rdi
- shrdq $28,%r14,%r14
- addq %r11,%rdx
- addq %rdi,%r11
- movq %rdx,%r13
- addq %r11,%r14
- shrdq $23,%r13,%r13
- movq %r14,%r11
- movq %r8,%r12
- shrdq $5,%r14,%r14
- xorq %rdx,%r13
- xorq %r9,%r12
- shrdq $4,%r13,%r13
- xorq %r11,%r14
- andq %rdx,%r12
- xorq %rdx,%r13
- addq 72(%rsp),%r10
- movq %r11,%rdi
- xorq %r9,%r12
- shrdq $6,%r14,%r14
- xorq %rax,%rdi
- addq %r12,%r10
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- xorq %r11,%r14
- addq %r13,%r10
- xorq %rax,%r15
- shrdq $28,%r14,%r14
- addq %r10,%rcx
- addq %r15,%r10
- movq %rcx,%r13
- addq %r10,%r14
- shrdq $23,%r13,%r13
- movq %r14,%r10
- movq %rdx,%r12
- shrdq $5,%r14,%r14
- xorq %rcx,%r13
- xorq %r8,%r12
- shrdq $4,%r13,%r13
- xorq %r10,%r14
- andq %rcx,%r12
- xorq %rcx,%r13
- addq 80(%rsp),%r9
- movq %r10,%r15
- xorq %r8,%r12
- shrdq $6,%r14,%r14
- xorq %r11,%r15
- addq %r12,%r9
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- xorq %r10,%r14
- addq %r13,%r9
- xorq %r11,%rdi
- shrdq $28,%r14,%r14
- addq %r9,%rbx
- addq %rdi,%r9
- movq %rbx,%r13
- addq %r9,%r14
- shrdq $23,%r13,%r13
- movq %r14,%r9
- movq %rcx,%r12
- shrdq $5,%r14,%r14
- xorq %rbx,%r13
- xorq %rdx,%r12
- shrdq $4,%r13,%r13
- xorq %r9,%r14
- andq %rbx,%r12
- xorq %rbx,%r13
- addq 88(%rsp),%r8
- movq %r9,%rdi
- xorq %rdx,%r12
- shrdq $6,%r14,%r14
- xorq %r10,%rdi
- addq %r12,%r8
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- xorq %r9,%r14
- addq %r13,%r8
- xorq %r10,%r15
- shrdq $28,%r14,%r14
- addq %r8,%rax
- addq %r15,%r8
- movq %rax,%r13
- addq %r8,%r14
- shrdq $23,%r13,%r13
- movq %r14,%r8
- movq %rbx,%r12
- shrdq $5,%r14,%r14
- xorq %rax,%r13
- xorq %rcx,%r12
- shrdq $4,%r13,%r13
- xorq %r8,%r14
- andq %rax,%r12
- xorq %rax,%r13
- addq 96(%rsp),%rdx
- movq %r8,%r15
- xorq %rcx,%r12
- shrdq $6,%r14,%r14
- xorq %r9,%r15
- addq %r12,%rdx
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- xorq %r8,%r14
- addq %r13,%rdx
- xorq %r9,%rdi
- shrdq $28,%r14,%r14
- addq %rdx,%r11
- addq %rdi,%rdx
- movq %r11,%r13
- addq %rdx,%r14
- shrdq $23,%r13,%r13
- movq %r14,%rdx
- movq %rax,%r12
- shrdq $5,%r14,%r14
- xorq %r11,%r13
- xorq %rbx,%r12
- shrdq $4,%r13,%r13
- xorq %rdx,%r14
- andq %r11,%r12
- xorq %r11,%r13
- addq 104(%rsp),%rcx
- movq %rdx,%rdi
- xorq %rbx,%r12
- shrdq $6,%r14,%r14
- xorq %r8,%rdi
- addq %r12,%rcx
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- xorq %rdx,%r14
- addq %r13,%rcx
- xorq %r8,%r15
- shrdq $28,%r14,%r14
- addq %rcx,%r10
- addq %r15,%rcx
- movq %r10,%r13
- addq %rcx,%r14
- shrdq $23,%r13,%r13
- movq %r14,%rcx
- movq %r11,%r12
- shrdq $5,%r14,%r14
- xorq %r10,%r13
- xorq %rax,%r12
- shrdq $4,%r13,%r13
- xorq %rcx,%r14
- andq %r10,%r12
- xorq %r10,%r13
- addq 112(%rsp),%rbx
- movq %rcx,%r15
- xorq %rax,%r12
- shrdq $6,%r14,%r14
- xorq %rdx,%r15
- addq %r12,%rbx
- shrdq $14,%r13,%r13
- andq %r15,%rdi
- xorq %rcx,%r14
- addq %r13,%rbx
- xorq %rdx,%rdi
- shrdq $28,%r14,%r14
- addq %rbx,%r9
- addq %rdi,%rbx
- movq %r9,%r13
- addq %rbx,%r14
- shrdq $23,%r13,%r13
- movq %r14,%rbx
- movq %r10,%r12
- shrdq $5,%r14,%r14
- xorq %r9,%r13
- xorq %r11,%r12
- shrdq $4,%r13,%r13
- xorq %rbx,%r14
- andq %r9,%r12
- xorq %r9,%r13
- addq 120(%rsp),%rax
- movq %rbx,%rdi
- xorq %r11,%r12
- shrdq $6,%r14,%r14
- xorq %rcx,%rdi
- addq %r12,%rax
- shrdq $14,%r13,%r13
- andq %rdi,%r15
- xorq %rbx,%r14
- addq %r13,%rax
- xorq %rcx,%r15
- shrdq $28,%r14,%r14
- addq %rax,%r8
- addq %r15,%rax
- movq %r8,%r13
- addq %rax,%r14
- movq 128+0(%rsp),%rdi
- movq %r14,%rax
-
- addq 0(%rdi),%rax
- leaq 128(%rsi),%rsi
- addq 8(%rdi),%rbx
- addq 16(%rdi),%rcx
- addq 24(%rdi),%rdx
- addq 32(%rdi),%r8
- addq 40(%rdi),%r9
- addq 48(%rdi),%r10
- addq 56(%rdi),%r11
-
- cmpq 128+16(%rsp),%rsi
-
- movq %rax,0(%rdi)
- movq %rbx,8(%rdi)
- movq %rcx,16(%rdi)
- movq %rdx,24(%rdi)
- movq %r8,32(%rdi)
- movq %r9,40(%rdi)
- movq %r10,48(%rdi)
- movq %r11,56(%rdi)
- jb .Lloop_avx
-
- movq 152(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- vzeroupper
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha512_block_data_order_avx,.-sha512_block_data_order_avx
-.type sha512_block_data_order_avx2,@function
-.align 64
-sha512_block_data_order_avx2:
-.cfi_startproc
-.Lavx2_shortcut:
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
- subq $1312,%rsp
- shlq $4,%rdx
- andq $-2048,%rsp
- leaq (%rsi,%rdx,8),%rdx
- addq $1152,%rsp
- movq %rdi,128+0(%rsp)
- movq %rsi,128+8(%rsp)
- movq %rdx,128+16(%rsp)
- movq %rax,152(%rsp)
-.cfi_escape 0x0f,0x06,0x77,0x98,0x01,0x06,0x23,0x08
-.Lprologue_avx2:
-
- vzeroupper
- subq $-128,%rsi
- movq 0(%rdi),%rax
- movq %rsi,%r12
- movq 8(%rdi),%rbx
- cmpq %rdx,%rsi
- movq 16(%rdi),%rcx
- cmoveq %rsp,%r12
- movq 24(%rdi),%rdx
- movq 32(%rdi),%r8
- movq 40(%rdi),%r9
- movq 48(%rdi),%r10
- movq 56(%rdi),%r11
- jmp .Loop_avx2
-.align 16
-.Loop_avx2:
- vmovdqu -128(%rsi),%xmm0
- vmovdqu -128+16(%rsi),%xmm1
- vmovdqu -128+32(%rsi),%xmm2
- leaq K512+128(%rip),%rbp
- vmovdqu -128+48(%rsi),%xmm3
- vmovdqu -128+64(%rsi),%xmm4
- vmovdqu -128+80(%rsi),%xmm5
- vmovdqu -128+96(%rsi),%xmm6
- vmovdqu -128+112(%rsi),%xmm7
-
- vmovdqa 1152(%rbp),%ymm10
- vinserti128 $1,(%r12),%ymm0,%ymm0
- vinserti128 $1,16(%r12),%ymm1,%ymm1
- vpshufb %ymm10,%ymm0,%ymm0
- vinserti128 $1,32(%r12),%ymm2,%ymm2
- vpshufb %ymm10,%ymm1,%ymm1
- vinserti128 $1,48(%r12),%ymm3,%ymm3
- vpshufb %ymm10,%ymm2,%ymm2
- vinserti128 $1,64(%r12),%ymm4,%ymm4
- vpshufb %ymm10,%ymm3,%ymm3
- vinserti128 $1,80(%r12),%ymm5,%ymm5
- vpshufb %ymm10,%ymm4,%ymm4
- vinserti128 $1,96(%r12),%ymm6,%ymm6
- vpshufb %ymm10,%ymm5,%ymm5
- vinserti128 $1,112(%r12),%ymm7,%ymm7
-
- vpaddq -128(%rbp),%ymm0,%ymm8
- vpshufb %ymm10,%ymm6,%ymm6
- vpaddq -96(%rbp),%ymm1,%ymm9
- vpshufb %ymm10,%ymm7,%ymm7
- vpaddq -64(%rbp),%ymm2,%ymm10
- vpaddq -32(%rbp),%ymm3,%ymm11
- vmovdqa %ymm8,0(%rsp)
- vpaddq 0(%rbp),%ymm4,%ymm8
- vmovdqa %ymm9,32(%rsp)
- vpaddq 32(%rbp),%ymm5,%ymm9
- vmovdqa %ymm10,64(%rsp)
- vpaddq 64(%rbp),%ymm6,%ymm10
- vmovdqa %ymm11,96(%rsp)
- leaq -128(%rsp),%rsp
- vpaddq 96(%rbp),%ymm7,%ymm11
- vmovdqa %ymm8,0(%rsp)
- xorq %r14,%r14
- vmovdqa %ymm9,32(%rsp)
- movq %rbx,%rdi
- vmovdqa %ymm10,64(%rsp)
- xorq %rcx,%rdi
- vmovdqa %ymm11,96(%rsp)
- movq %r9,%r12
- addq $32*8,%rbp
- jmp .Lavx2_00_47
-
-.align 16
-.Lavx2_00_47:
- leaq -128(%rsp),%rsp
- vpalignr $8,%ymm0,%ymm1,%ymm8
- addq 0+256(%rsp),%r11
- andq %r8,%r12
- rorxq $41,%r8,%r13
- vpalignr $8,%ymm4,%ymm5,%ymm11
- rorxq $18,%r8,%r15
- leaq (%rax,%r14,1),%rax
- leaq (%r11,%r12,1),%r11
- vpsrlq $1,%ymm8,%ymm10
- andnq %r10,%r8,%r12
- xorq %r15,%r13
- rorxq $14,%r8,%r14
- vpaddq %ymm11,%ymm0,%ymm0
- vpsrlq $7,%ymm8,%ymm11
- leaq (%r11,%r12,1),%r11
- xorq %r14,%r13
- movq %rax,%r15
- vpsllq $56,%ymm8,%ymm9
- vpxor %ymm10,%ymm11,%ymm8
- rorxq $39,%rax,%r12
- leaq (%r11,%r13,1),%r11
- xorq %rbx,%r15
- vpsrlq $7,%ymm10,%ymm10
- vpxor %ymm9,%ymm8,%ymm8
- rorxq $34,%rax,%r14
- rorxq $28,%rax,%r13
- leaq (%rdx,%r11,1),%rdx
- vpsllq $7,%ymm9,%ymm9
- vpxor %ymm10,%ymm8,%ymm8
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %rbx,%rdi
- vpsrlq $6,%ymm7,%ymm11
- vpxor %ymm9,%ymm8,%ymm8
- xorq %r13,%r14
- leaq (%r11,%rdi,1),%r11
- movq %r8,%r12
- vpsllq $3,%ymm7,%ymm10
- vpaddq %ymm8,%ymm0,%ymm0
- addq 8+256(%rsp),%r10
- andq %rdx,%r12
- rorxq $41,%rdx,%r13
- vpsrlq $19,%ymm7,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- rorxq $18,%rdx,%rdi
- leaq (%r11,%r14,1),%r11
- leaq (%r10,%r12,1),%r10
- vpsllq $42,%ymm10,%ymm10
- vpxor %ymm9,%ymm11,%ymm11
- andnq %r9,%rdx,%r12
- xorq %rdi,%r13
- rorxq $14,%rdx,%r14
- vpsrlq $42,%ymm9,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- leaq (%r10,%r12,1),%r10
- xorq %r14,%r13
- movq %r11,%rdi
- vpxor %ymm9,%ymm11,%ymm11
- rorxq $39,%r11,%r12
- leaq (%r10,%r13,1),%r10
- xorq %rax,%rdi
- vpaddq %ymm11,%ymm0,%ymm0
- rorxq $34,%r11,%r14
- rorxq $28,%r11,%r13
- leaq (%rcx,%r10,1),%rcx
- vpaddq -128(%rbp),%ymm0,%ymm10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %rax,%r15
- xorq %r13,%r14
- leaq (%r10,%r15,1),%r10
- movq %rdx,%r12
- vmovdqa %ymm10,0(%rsp)
- vpalignr $8,%ymm1,%ymm2,%ymm8
- addq 32+256(%rsp),%r9
- andq %rcx,%r12
- rorxq $41,%rcx,%r13
- vpalignr $8,%ymm5,%ymm6,%ymm11
- rorxq $18,%rcx,%r15
- leaq (%r10,%r14,1),%r10
- leaq (%r9,%r12,1),%r9
- vpsrlq $1,%ymm8,%ymm10
- andnq %r8,%rcx,%r12
- xorq %r15,%r13
- rorxq $14,%rcx,%r14
- vpaddq %ymm11,%ymm1,%ymm1
- vpsrlq $7,%ymm8,%ymm11
- leaq (%r9,%r12,1),%r9
- xorq %r14,%r13
- movq %r10,%r15
- vpsllq $56,%ymm8,%ymm9
- vpxor %ymm10,%ymm11,%ymm8
- rorxq $39,%r10,%r12
- leaq (%r9,%r13,1),%r9
- xorq %r11,%r15
- vpsrlq $7,%ymm10,%ymm10
- vpxor %ymm9,%ymm8,%ymm8
- rorxq $34,%r10,%r14
- rorxq $28,%r10,%r13
- leaq (%rbx,%r9,1),%rbx
- vpsllq $7,%ymm9,%ymm9
- vpxor %ymm10,%ymm8,%ymm8
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %r11,%rdi
- vpsrlq $6,%ymm0,%ymm11
- vpxor %ymm9,%ymm8,%ymm8
- xorq %r13,%r14
- leaq (%r9,%rdi,1),%r9
- movq %rcx,%r12
- vpsllq $3,%ymm0,%ymm10
- vpaddq %ymm8,%ymm1,%ymm1
- addq 40+256(%rsp),%r8
- andq %rbx,%r12
- rorxq $41,%rbx,%r13
- vpsrlq $19,%ymm0,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- rorxq $18,%rbx,%rdi
- leaq (%r9,%r14,1),%r9
- leaq (%r8,%r12,1),%r8
- vpsllq $42,%ymm10,%ymm10
- vpxor %ymm9,%ymm11,%ymm11
- andnq %rdx,%rbx,%r12
- xorq %rdi,%r13
- rorxq $14,%rbx,%r14
- vpsrlq $42,%ymm9,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- leaq (%r8,%r12,1),%r8
- xorq %r14,%r13
- movq %r9,%rdi
- vpxor %ymm9,%ymm11,%ymm11
- rorxq $39,%r9,%r12
- leaq (%r8,%r13,1),%r8
- xorq %r10,%rdi
- vpaddq %ymm11,%ymm1,%ymm1
- rorxq $34,%r9,%r14
- rorxq $28,%r9,%r13
- leaq (%rax,%r8,1),%rax
- vpaddq -96(%rbp),%ymm1,%ymm10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %r10,%r15
- xorq %r13,%r14
- leaq (%r8,%r15,1),%r8
- movq %rbx,%r12
- vmovdqa %ymm10,32(%rsp)
- vpalignr $8,%ymm2,%ymm3,%ymm8
- addq 64+256(%rsp),%rdx
- andq %rax,%r12
- rorxq $41,%rax,%r13
- vpalignr $8,%ymm6,%ymm7,%ymm11
- rorxq $18,%rax,%r15
- leaq (%r8,%r14,1),%r8
- leaq (%rdx,%r12,1),%rdx
- vpsrlq $1,%ymm8,%ymm10
- andnq %rcx,%rax,%r12
- xorq %r15,%r13
- rorxq $14,%rax,%r14
- vpaddq %ymm11,%ymm2,%ymm2
- vpsrlq $7,%ymm8,%ymm11
- leaq (%rdx,%r12,1),%rdx
- xorq %r14,%r13
- movq %r8,%r15
- vpsllq $56,%ymm8,%ymm9
- vpxor %ymm10,%ymm11,%ymm8
- rorxq $39,%r8,%r12
- leaq (%rdx,%r13,1),%rdx
- xorq %r9,%r15
- vpsrlq $7,%ymm10,%ymm10
- vpxor %ymm9,%ymm8,%ymm8
- rorxq $34,%r8,%r14
- rorxq $28,%r8,%r13
- leaq (%r11,%rdx,1),%r11
- vpsllq $7,%ymm9,%ymm9
- vpxor %ymm10,%ymm8,%ymm8
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %r9,%rdi
- vpsrlq $6,%ymm1,%ymm11
- vpxor %ymm9,%ymm8,%ymm8
- xorq %r13,%r14
- leaq (%rdx,%rdi,1),%rdx
- movq %rax,%r12
- vpsllq $3,%ymm1,%ymm10
- vpaddq %ymm8,%ymm2,%ymm2
- addq 72+256(%rsp),%rcx
- andq %r11,%r12
- rorxq $41,%r11,%r13
- vpsrlq $19,%ymm1,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- rorxq $18,%r11,%rdi
- leaq (%rdx,%r14,1),%rdx
- leaq (%rcx,%r12,1),%rcx
- vpsllq $42,%ymm10,%ymm10
- vpxor %ymm9,%ymm11,%ymm11
- andnq %rbx,%r11,%r12
- xorq %rdi,%r13
- rorxq $14,%r11,%r14
- vpsrlq $42,%ymm9,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- leaq (%rcx,%r12,1),%rcx
- xorq %r14,%r13
- movq %rdx,%rdi
- vpxor %ymm9,%ymm11,%ymm11
- rorxq $39,%rdx,%r12
- leaq (%rcx,%r13,1),%rcx
- xorq %r8,%rdi
- vpaddq %ymm11,%ymm2,%ymm2
- rorxq $34,%rdx,%r14
- rorxq $28,%rdx,%r13
- leaq (%r10,%rcx,1),%r10
- vpaddq -64(%rbp),%ymm2,%ymm10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %r8,%r15
- xorq %r13,%r14
- leaq (%rcx,%r15,1),%rcx
- movq %r11,%r12
- vmovdqa %ymm10,64(%rsp)
- vpalignr $8,%ymm3,%ymm4,%ymm8
- addq 96+256(%rsp),%rbx
- andq %r10,%r12
- rorxq $41,%r10,%r13
- vpalignr $8,%ymm7,%ymm0,%ymm11
- rorxq $18,%r10,%r15
- leaq (%rcx,%r14,1),%rcx
- leaq (%rbx,%r12,1),%rbx
- vpsrlq $1,%ymm8,%ymm10
- andnq %rax,%r10,%r12
- xorq %r15,%r13
- rorxq $14,%r10,%r14
- vpaddq %ymm11,%ymm3,%ymm3
- vpsrlq $7,%ymm8,%ymm11
- leaq (%rbx,%r12,1),%rbx
- xorq %r14,%r13
- movq %rcx,%r15
- vpsllq $56,%ymm8,%ymm9
- vpxor %ymm10,%ymm11,%ymm8
- rorxq $39,%rcx,%r12
- leaq (%rbx,%r13,1),%rbx
- xorq %rdx,%r15
- vpsrlq $7,%ymm10,%ymm10
- vpxor %ymm9,%ymm8,%ymm8
- rorxq $34,%rcx,%r14
- rorxq $28,%rcx,%r13
- leaq (%r9,%rbx,1),%r9
- vpsllq $7,%ymm9,%ymm9
- vpxor %ymm10,%ymm8,%ymm8
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %rdx,%rdi
- vpsrlq $6,%ymm2,%ymm11
- vpxor %ymm9,%ymm8,%ymm8
- xorq %r13,%r14
- leaq (%rbx,%rdi,1),%rbx
- movq %r10,%r12
- vpsllq $3,%ymm2,%ymm10
- vpaddq %ymm8,%ymm3,%ymm3
- addq 104+256(%rsp),%rax
- andq %r9,%r12
- rorxq $41,%r9,%r13
- vpsrlq $19,%ymm2,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- rorxq $18,%r9,%rdi
- leaq (%rbx,%r14,1),%rbx
- leaq (%rax,%r12,1),%rax
- vpsllq $42,%ymm10,%ymm10
- vpxor %ymm9,%ymm11,%ymm11
- andnq %r11,%r9,%r12
- xorq %rdi,%r13
- rorxq $14,%r9,%r14
- vpsrlq $42,%ymm9,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- leaq (%rax,%r12,1),%rax
- xorq %r14,%r13
- movq %rbx,%rdi
- vpxor %ymm9,%ymm11,%ymm11
- rorxq $39,%rbx,%r12
- leaq (%rax,%r13,1),%rax
- xorq %rcx,%rdi
- vpaddq %ymm11,%ymm3,%ymm3
- rorxq $34,%rbx,%r14
- rorxq $28,%rbx,%r13
- leaq (%r8,%rax,1),%r8
- vpaddq -32(%rbp),%ymm3,%ymm10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %rcx,%r15
- xorq %r13,%r14
- leaq (%rax,%r15,1),%rax
- movq %r9,%r12
- vmovdqa %ymm10,96(%rsp)
- leaq -128(%rsp),%rsp
- vpalignr $8,%ymm4,%ymm5,%ymm8
- addq 0+256(%rsp),%r11
- andq %r8,%r12
- rorxq $41,%r8,%r13
- vpalignr $8,%ymm0,%ymm1,%ymm11
- rorxq $18,%r8,%r15
- leaq (%rax,%r14,1),%rax
- leaq (%r11,%r12,1),%r11
- vpsrlq $1,%ymm8,%ymm10
- andnq %r10,%r8,%r12
- xorq %r15,%r13
- rorxq $14,%r8,%r14
- vpaddq %ymm11,%ymm4,%ymm4
- vpsrlq $7,%ymm8,%ymm11
- leaq (%r11,%r12,1),%r11
- xorq %r14,%r13
- movq %rax,%r15
- vpsllq $56,%ymm8,%ymm9
- vpxor %ymm10,%ymm11,%ymm8
- rorxq $39,%rax,%r12
- leaq (%r11,%r13,1),%r11
- xorq %rbx,%r15
- vpsrlq $7,%ymm10,%ymm10
- vpxor %ymm9,%ymm8,%ymm8
- rorxq $34,%rax,%r14
- rorxq $28,%rax,%r13
- leaq (%rdx,%r11,1),%rdx
- vpsllq $7,%ymm9,%ymm9
- vpxor %ymm10,%ymm8,%ymm8
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %rbx,%rdi
- vpsrlq $6,%ymm3,%ymm11
- vpxor %ymm9,%ymm8,%ymm8
- xorq %r13,%r14
- leaq (%r11,%rdi,1),%r11
- movq %r8,%r12
- vpsllq $3,%ymm3,%ymm10
- vpaddq %ymm8,%ymm4,%ymm4
- addq 8+256(%rsp),%r10
- andq %rdx,%r12
- rorxq $41,%rdx,%r13
- vpsrlq $19,%ymm3,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- rorxq $18,%rdx,%rdi
- leaq (%r11,%r14,1),%r11
- leaq (%r10,%r12,1),%r10
- vpsllq $42,%ymm10,%ymm10
- vpxor %ymm9,%ymm11,%ymm11
- andnq %r9,%rdx,%r12
- xorq %rdi,%r13
- rorxq $14,%rdx,%r14
- vpsrlq $42,%ymm9,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- leaq (%r10,%r12,1),%r10
- xorq %r14,%r13
- movq %r11,%rdi
- vpxor %ymm9,%ymm11,%ymm11
- rorxq $39,%r11,%r12
- leaq (%r10,%r13,1),%r10
- xorq %rax,%rdi
- vpaddq %ymm11,%ymm4,%ymm4
- rorxq $34,%r11,%r14
- rorxq $28,%r11,%r13
- leaq (%rcx,%r10,1),%rcx
- vpaddq 0(%rbp),%ymm4,%ymm10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %rax,%r15
- xorq %r13,%r14
- leaq (%r10,%r15,1),%r10
- movq %rdx,%r12
- vmovdqa %ymm10,0(%rsp)
- vpalignr $8,%ymm5,%ymm6,%ymm8
- addq 32+256(%rsp),%r9
- andq %rcx,%r12
- rorxq $41,%rcx,%r13
- vpalignr $8,%ymm1,%ymm2,%ymm11
- rorxq $18,%rcx,%r15
- leaq (%r10,%r14,1),%r10
- leaq (%r9,%r12,1),%r9
- vpsrlq $1,%ymm8,%ymm10
- andnq %r8,%rcx,%r12
- xorq %r15,%r13
- rorxq $14,%rcx,%r14
- vpaddq %ymm11,%ymm5,%ymm5
- vpsrlq $7,%ymm8,%ymm11
- leaq (%r9,%r12,1),%r9
- xorq %r14,%r13
- movq %r10,%r15
- vpsllq $56,%ymm8,%ymm9
- vpxor %ymm10,%ymm11,%ymm8
- rorxq $39,%r10,%r12
- leaq (%r9,%r13,1),%r9
- xorq %r11,%r15
- vpsrlq $7,%ymm10,%ymm10
- vpxor %ymm9,%ymm8,%ymm8
- rorxq $34,%r10,%r14
- rorxq $28,%r10,%r13
- leaq (%rbx,%r9,1),%rbx
- vpsllq $7,%ymm9,%ymm9
- vpxor %ymm10,%ymm8,%ymm8
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %r11,%rdi
- vpsrlq $6,%ymm4,%ymm11
- vpxor %ymm9,%ymm8,%ymm8
- xorq %r13,%r14
- leaq (%r9,%rdi,1),%r9
- movq %rcx,%r12
- vpsllq $3,%ymm4,%ymm10
- vpaddq %ymm8,%ymm5,%ymm5
- addq 40+256(%rsp),%r8
- andq %rbx,%r12
- rorxq $41,%rbx,%r13
- vpsrlq $19,%ymm4,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- rorxq $18,%rbx,%rdi
- leaq (%r9,%r14,1),%r9
- leaq (%r8,%r12,1),%r8
- vpsllq $42,%ymm10,%ymm10
- vpxor %ymm9,%ymm11,%ymm11
- andnq %rdx,%rbx,%r12
- xorq %rdi,%r13
- rorxq $14,%rbx,%r14
- vpsrlq $42,%ymm9,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- leaq (%r8,%r12,1),%r8
- xorq %r14,%r13
- movq %r9,%rdi
- vpxor %ymm9,%ymm11,%ymm11
- rorxq $39,%r9,%r12
- leaq (%r8,%r13,1),%r8
- xorq %r10,%rdi
- vpaddq %ymm11,%ymm5,%ymm5
- rorxq $34,%r9,%r14
- rorxq $28,%r9,%r13
- leaq (%rax,%r8,1),%rax
- vpaddq 32(%rbp),%ymm5,%ymm10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %r10,%r15
- xorq %r13,%r14
- leaq (%r8,%r15,1),%r8
- movq %rbx,%r12
- vmovdqa %ymm10,32(%rsp)
- vpalignr $8,%ymm6,%ymm7,%ymm8
- addq 64+256(%rsp),%rdx
- andq %rax,%r12
- rorxq $41,%rax,%r13
- vpalignr $8,%ymm2,%ymm3,%ymm11
- rorxq $18,%rax,%r15
- leaq (%r8,%r14,1),%r8
- leaq (%rdx,%r12,1),%rdx
- vpsrlq $1,%ymm8,%ymm10
- andnq %rcx,%rax,%r12
- xorq %r15,%r13
- rorxq $14,%rax,%r14
- vpaddq %ymm11,%ymm6,%ymm6
- vpsrlq $7,%ymm8,%ymm11
- leaq (%rdx,%r12,1),%rdx
- xorq %r14,%r13
- movq %r8,%r15
- vpsllq $56,%ymm8,%ymm9
- vpxor %ymm10,%ymm11,%ymm8
- rorxq $39,%r8,%r12
- leaq (%rdx,%r13,1),%rdx
- xorq %r9,%r15
- vpsrlq $7,%ymm10,%ymm10
- vpxor %ymm9,%ymm8,%ymm8
- rorxq $34,%r8,%r14
- rorxq $28,%r8,%r13
- leaq (%r11,%rdx,1),%r11
- vpsllq $7,%ymm9,%ymm9
- vpxor %ymm10,%ymm8,%ymm8
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %r9,%rdi
- vpsrlq $6,%ymm5,%ymm11
- vpxor %ymm9,%ymm8,%ymm8
- xorq %r13,%r14
- leaq (%rdx,%rdi,1),%rdx
- movq %rax,%r12
- vpsllq $3,%ymm5,%ymm10
- vpaddq %ymm8,%ymm6,%ymm6
- addq 72+256(%rsp),%rcx
- andq %r11,%r12
- rorxq $41,%r11,%r13
- vpsrlq $19,%ymm5,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- rorxq $18,%r11,%rdi
- leaq (%rdx,%r14,1),%rdx
- leaq (%rcx,%r12,1),%rcx
- vpsllq $42,%ymm10,%ymm10
- vpxor %ymm9,%ymm11,%ymm11
- andnq %rbx,%r11,%r12
- xorq %rdi,%r13
- rorxq $14,%r11,%r14
- vpsrlq $42,%ymm9,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- leaq (%rcx,%r12,1),%rcx
- xorq %r14,%r13
- movq %rdx,%rdi
- vpxor %ymm9,%ymm11,%ymm11
- rorxq $39,%rdx,%r12
- leaq (%rcx,%r13,1),%rcx
- xorq %r8,%rdi
- vpaddq %ymm11,%ymm6,%ymm6
- rorxq $34,%rdx,%r14
- rorxq $28,%rdx,%r13
- leaq (%r10,%rcx,1),%r10
- vpaddq 64(%rbp),%ymm6,%ymm10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %r8,%r15
- xorq %r13,%r14
- leaq (%rcx,%r15,1),%rcx
- movq %r11,%r12
- vmovdqa %ymm10,64(%rsp)
- vpalignr $8,%ymm7,%ymm0,%ymm8
- addq 96+256(%rsp),%rbx
- andq %r10,%r12
- rorxq $41,%r10,%r13
- vpalignr $8,%ymm3,%ymm4,%ymm11
- rorxq $18,%r10,%r15
- leaq (%rcx,%r14,1),%rcx
- leaq (%rbx,%r12,1),%rbx
- vpsrlq $1,%ymm8,%ymm10
- andnq %rax,%r10,%r12
- xorq %r15,%r13
- rorxq $14,%r10,%r14
- vpaddq %ymm11,%ymm7,%ymm7
- vpsrlq $7,%ymm8,%ymm11
- leaq (%rbx,%r12,1),%rbx
- xorq %r14,%r13
- movq %rcx,%r15
- vpsllq $56,%ymm8,%ymm9
- vpxor %ymm10,%ymm11,%ymm8
- rorxq $39,%rcx,%r12
- leaq (%rbx,%r13,1),%rbx
- xorq %rdx,%r15
- vpsrlq $7,%ymm10,%ymm10
- vpxor %ymm9,%ymm8,%ymm8
- rorxq $34,%rcx,%r14
- rorxq $28,%rcx,%r13
- leaq (%r9,%rbx,1),%r9
- vpsllq $7,%ymm9,%ymm9
- vpxor %ymm10,%ymm8,%ymm8
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %rdx,%rdi
- vpsrlq $6,%ymm6,%ymm11
- vpxor %ymm9,%ymm8,%ymm8
- xorq %r13,%r14
- leaq (%rbx,%rdi,1),%rbx
- movq %r10,%r12
- vpsllq $3,%ymm6,%ymm10
- vpaddq %ymm8,%ymm7,%ymm7
- addq 104+256(%rsp),%rax
- andq %r9,%r12
- rorxq $41,%r9,%r13
- vpsrlq $19,%ymm6,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- rorxq $18,%r9,%rdi
- leaq (%rbx,%r14,1),%rbx
- leaq (%rax,%r12,1),%rax
- vpsllq $42,%ymm10,%ymm10
- vpxor %ymm9,%ymm11,%ymm11
- andnq %r11,%r9,%r12
- xorq %rdi,%r13
- rorxq $14,%r9,%r14
- vpsrlq $42,%ymm9,%ymm9
- vpxor %ymm10,%ymm11,%ymm11
- leaq (%rax,%r12,1),%rax
- xorq %r14,%r13
- movq %rbx,%rdi
- vpxor %ymm9,%ymm11,%ymm11
- rorxq $39,%rbx,%r12
- leaq (%rax,%r13,1),%rax
- xorq %rcx,%rdi
- vpaddq %ymm11,%ymm7,%ymm7
- rorxq $34,%rbx,%r14
- rorxq $28,%rbx,%r13
- leaq (%r8,%rax,1),%r8
- vpaddq 96(%rbp),%ymm7,%ymm10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %rcx,%r15
- xorq %r13,%r14
- leaq (%rax,%r15,1),%rax
- movq %r9,%r12
- vmovdqa %ymm10,96(%rsp)
- leaq 256(%rbp),%rbp
- cmpb $0,-121(%rbp)
- jne .Lavx2_00_47
- addq 0+128(%rsp),%r11
- andq %r8,%r12
- rorxq $41,%r8,%r13
- rorxq $18,%r8,%r15
- leaq (%rax,%r14,1),%rax
- leaq (%r11,%r12,1),%r11
- andnq %r10,%r8,%r12
- xorq %r15,%r13
- rorxq $14,%r8,%r14
- leaq (%r11,%r12,1),%r11
- xorq %r14,%r13
- movq %rax,%r15
- rorxq $39,%rax,%r12
- leaq (%r11,%r13,1),%r11
- xorq %rbx,%r15
- rorxq $34,%rax,%r14
- rorxq $28,%rax,%r13
- leaq (%rdx,%r11,1),%rdx
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %rbx,%rdi
- xorq %r13,%r14
- leaq (%r11,%rdi,1),%r11
- movq %r8,%r12
- addq 8+128(%rsp),%r10
- andq %rdx,%r12
- rorxq $41,%rdx,%r13
- rorxq $18,%rdx,%rdi
- leaq (%r11,%r14,1),%r11
- leaq (%r10,%r12,1),%r10
- andnq %r9,%rdx,%r12
- xorq %rdi,%r13
- rorxq $14,%rdx,%r14
- leaq (%r10,%r12,1),%r10
- xorq %r14,%r13
- movq %r11,%rdi
- rorxq $39,%r11,%r12
- leaq (%r10,%r13,1),%r10
- xorq %rax,%rdi
- rorxq $34,%r11,%r14
- rorxq $28,%r11,%r13
- leaq (%rcx,%r10,1),%rcx
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %rax,%r15
- xorq %r13,%r14
- leaq (%r10,%r15,1),%r10
- movq %rdx,%r12
- addq 32+128(%rsp),%r9
- andq %rcx,%r12
- rorxq $41,%rcx,%r13
- rorxq $18,%rcx,%r15
- leaq (%r10,%r14,1),%r10
- leaq (%r9,%r12,1),%r9
- andnq %r8,%rcx,%r12
- xorq %r15,%r13
- rorxq $14,%rcx,%r14
- leaq (%r9,%r12,1),%r9
- xorq %r14,%r13
- movq %r10,%r15
- rorxq $39,%r10,%r12
- leaq (%r9,%r13,1),%r9
- xorq %r11,%r15
- rorxq $34,%r10,%r14
- rorxq $28,%r10,%r13
- leaq (%rbx,%r9,1),%rbx
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %r11,%rdi
- xorq %r13,%r14
- leaq (%r9,%rdi,1),%r9
- movq %rcx,%r12
- addq 40+128(%rsp),%r8
- andq %rbx,%r12
- rorxq $41,%rbx,%r13
- rorxq $18,%rbx,%rdi
- leaq (%r9,%r14,1),%r9
- leaq (%r8,%r12,1),%r8
- andnq %rdx,%rbx,%r12
- xorq %rdi,%r13
- rorxq $14,%rbx,%r14
- leaq (%r8,%r12,1),%r8
- xorq %r14,%r13
- movq %r9,%rdi
- rorxq $39,%r9,%r12
- leaq (%r8,%r13,1),%r8
- xorq %r10,%rdi
- rorxq $34,%r9,%r14
- rorxq $28,%r9,%r13
- leaq (%rax,%r8,1),%rax
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %r10,%r15
- xorq %r13,%r14
- leaq (%r8,%r15,1),%r8
- movq %rbx,%r12
- addq 64+128(%rsp),%rdx
- andq %rax,%r12
- rorxq $41,%rax,%r13
- rorxq $18,%rax,%r15
- leaq (%r8,%r14,1),%r8
- leaq (%rdx,%r12,1),%rdx
- andnq %rcx,%rax,%r12
- xorq %r15,%r13
- rorxq $14,%rax,%r14
- leaq (%rdx,%r12,1),%rdx
- xorq %r14,%r13
- movq %r8,%r15
- rorxq $39,%r8,%r12
- leaq (%rdx,%r13,1),%rdx
- xorq %r9,%r15
- rorxq $34,%r8,%r14
- rorxq $28,%r8,%r13
- leaq (%r11,%rdx,1),%r11
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %r9,%rdi
- xorq %r13,%r14
- leaq (%rdx,%rdi,1),%rdx
- movq %rax,%r12
- addq 72+128(%rsp),%rcx
- andq %r11,%r12
- rorxq $41,%r11,%r13
- rorxq $18,%r11,%rdi
- leaq (%rdx,%r14,1),%rdx
- leaq (%rcx,%r12,1),%rcx
- andnq %rbx,%r11,%r12
- xorq %rdi,%r13
- rorxq $14,%r11,%r14
- leaq (%rcx,%r12,1),%rcx
- xorq %r14,%r13
- movq %rdx,%rdi
- rorxq $39,%rdx,%r12
- leaq (%rcx,%r13,1),%rcx
- xorq %r8,%rdi
- rorxq $34,%rdx,%r14
- rorxq $28,%rdx,%r13
- leaq (%r10,%rcx,1),%r10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %r8,%r15
- xorq %r13,%r14
- leaq (%rcx,%r15,1),%rcx
- movq %r11,%r12
- addq 96+128(%rsp),%rbx
- andq %r10,%r12
- rorxq $41,%r10,%r13
- rorxq $18,%r10,%r15
- leaq (%rcx,%r14,1),%rcx
- leaq (%rbx,%r12,1),%rbx
- andnq %rax,%r10,%r12
- xorq %r15,%r13
- rorxq $14,%r10,%r14
- leaq (%rbx,%r12,1),%rbx
- xorq %r14,%r13
- movq %rcx,%r15
- rorxq $39,%rcx,%r12
- leaq (%rbx,%r13,1),%rbx
- xorq %rdx,%r15
- rorxq $34,%rcx,%r14
- rorxq $28,%rcx,%r13
- leaq (%r9,%rbx,1),%r9
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %rdx,%rdi
- xorq %r13,%r14
- leaq (%rbx,%rdi,1),%rbx
- movq %r10,%r12
- addq 104+128(%rsp),%rax
- andq %r9,%r12
- rorxq $41,%r9,%r13
- rorxq $18,%r9,%rdi
- leaq (%rbx,%r14,1),%rbx
- leaq (%rax,%r12,1),%rax
- andnq %r11,%r9,%r12
- xorq %rdi,%r13
- rorxq $14,%r9,%r14
- leaq (%rax,%r12,1),%rax
- xorq %r14,%r13
- movq %rbx,%rdi
- rorxq $39,%rbx,%r12
- leaq (%rax,%r13,1),%rax
- xorq %rcx,%rdi
- rorxq $34,%rbx,%r14
- rorxq $28,%rbx,%r13
- leaq (%r8,%rax,1),%r8
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %rcx,%r15
- xorq %r13,%r14
- leaq (%rax,%r15,1),%rax
- movq %r9,%r12
- addq 0(%rsp),%r11
- andq %r8,%r12
- rorxq $41,%r8,%r13
- rorxq $18,%r8,%r15
- leaq (%rax,%r14,1),%rax
- leaq (%r11,%r12,1),%r11
- andnq %r10,%r8,%r12
- xorq %r15,%r13
- rorxq $14,%r8,%r14
- leaq (%r11,%r12,1),%r11
- xorq %r14,%r13
- movq %rax,%r15
- rorxq $39,%rax,%r12
- leaq (%r11,%r13,1),%r11
- xorq %rbx,%r15
- rorxq $34,%rax,%r14
- rorxq $28,%rax,%r13
- leaq (%rdx,%r11,1),%rdx
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %rbx,%rdi
- xorq %r13,%r14
- leaq (%r11,%rdi,1),%r11
- movq %r8,%r12
- addq 8(%rsp),%r10
- andq %rdx,%r12
- rorxq $41,%rdx,%r13
- rorxq $18,%rdx,%rdi
- leaq (%r11,%r14,1),%r11
- leaq (%r10,%r12,1),%r10
- andnq %r9,%rdx,%r12
- xorq %rdi,%r13
- rorxq $14,%rdx,%r14
- leaq (%r10,%r12,1),%r10
- xorq %r14,%r13
- movq %r11,%rdi
- rorxq $39,%r11,%r12
- leaq (%r10,%r13,1),%r10
- xorq %rax,%rdi
- rorxq $34,%r11,%r14
- rorxq $28,%r11,%r13
- leaq (%rcx,%r10,1),%rcx
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %rax,%r15
- xorq %r13,%r14
- leaq (%r10,%r15,1),%r10
- movq %rdx,%r12
- addq 32(%rsp),%r9
- andq %rcx,%r12
- rorxq $41,%rcx,%r13
- rorxq $18,%rcx,%r15
- leaq (%r10,%r14,1),%r10
- leaq (%r9,%r12,1),%r9
- andnq %r8,%rcx,%r12
- xorq %r15,%r13
- rorxq $14,%rcx,%r14
- leaq (%r9,%r12,1),%r9
- xorq %r14,%r13
- movq %r10,%r15
- rorxq $39,%r10,%r12
- leaq (%r9,%r13,1),%r9
- xorq %r11,%r15
- rorxq $34,%r10,%r14
- rorxq $28,%r10,%r13
- leaq (%rbx,%r9,1),%rbx
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %r11,%rdi
- xorq %r13,%r14
- leaq (%r9,%rdi,1),%r9
- movq %rcx,%r12
- addq 40(%rsp),%r8
- andq %rbx,%r12
- rorxq $41,%rbx,%r13
- rorxq $18,%rbx,%rdi
- leaq (%r9,%r14,1),%r9
- leaq (%r8,%r12,1),%r8
- andnq %rdx,%rbx,%r12
- xorq %rdi,%r13
- rorxq $14,%rbx,%r14
- leaq (%r8,%r12,1),%r8
- xorq %r14,%r13
- movq %r9,%rdi
- rorxq $39,%r9,%r12
- leaq (%r8,%r13,1),%r8
- xorq %r10,%rdi
- rorxq $34,%r9,%r14
- rorxq $28,%r9,%r13
- leaq (%rax,%r8,1),%rax
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %r10,%r15
- xorq %r13,%r14
- leaq (%r8,%r15,1),%r8
- movq %rbx,%r12
- addq 64(%rsp),%rdx
- andq %rax,%r12
- rorxq $41,%rax,%r13
- rorxq $18,%rax,%r15
- leaq (%r8,%r14,1),%r8
- leaq (%rdx,%r12,1),%rdx
- andnq %rcx,%rax,%r12
- xorq %r15,%r13
- rorxq $14,%rax,%r14
- leaq (%rdx,%r12,1),%rdx
- xorq %r14,%r13
- movq %r8,%r15
- rorxq $39,%r8,%r12
- leaq (%rdx,%r13,1),%rdx
- xorq %r9,%r15
- rorxq $34,%r8,%r14
- rorxq $28,%r8,%r13
- leaq (%r11,%rdx,1),%r11
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %r9,%rdi
- xorq %r13,%r14
- leaq (%rdx,%rdi,1),%rdx
- movq %rax,%r12
- addq 72(%rsp),%rcx
- andq %r11,%r12
- rorxq $41,%r11,%r13
- rorxq $18,%r11,%rdi
- leaq (%rdx,%r14,1),%rdx
- leaq (%rcx,%r12,1),%rcx
- andnq %rbx,%r11,%r12
- xorq %rdi,%r13
- rorxq $14,%r11,%r14
- leaq (%rcx,%r12,1),%rcx
- xorq %r14,%r13
- movq %rdx,%rdi
- rorxq $39,%rdx,%r12
- leaq (%rcx,%r13,1),%rcx
- xorq %r8,%rdi
- rorxq $34,%rdx,%r14
- rorxq $28,%rdx,%r13
- leaq (%r10,%rcx,1),%r10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %r8,%r15
- xorq %r13,%r14
- leaq (%rcx,%r15,1),%rcx
- movq %r11,%r12
- addq 96(%rsp),%rbx
- andq %r10,%r12
- rorxq $41,%r10,%r13
- rorxq $18,%r10,%r15
- leaq (%rcx,%r14,1),%rcx
- leaq (%rbx,%r12,1),%rbx
- andnq %rax,%r10,%r12
- xorq %r15,%r13
- rorxq $14,%r10,%r14
- leaq (%rbx,%r12,1),%rbx
- xorq %r14,%r13
- movq %rcx,%r15
- rorxq $39,%rcx,%r12
- leaq (%rbx,%r13,1),%rbx
- xorq %rdx,%r15
- rorxq $34,%rcx,%r14
- rorxq $28,%rcx,%r13
- leaq (%r9,%rbx,1),%r9
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %rdx,%rdi
- xorq %r13,%r14
- leaq (%rbx,%rdi,1),%rbx
- movq %r10,%r12
- addq 104(%rsp),%rax
- andq %r9,%r12
- rorxq $41,%r9,%r13
- rorxq $18,%r9,%rdi
- leaq (%rbx,%r14,1),%rbx
- leaq (%rax,%r12,1),%rax
- andnq %r11,%r9,%r12
- xorq %rdi,%r13
- rorxq $14,%r9,%r14
- leaq (%rax,%r12,1),%rax
- xorq %r14,%r13
- movq %rbx,%rdi
- rorxq $39,%rbx,%r12
- leaq (%rax,%r13,1),%rax
- xorq %rcx,%rdi
- rorxq $34,%rbx,%r14
- rorxq $28,%rbx,%r13
- leaq (%r8,%rax,1),%r8
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %rcx,%r15
- xorq %r13,%r14
- leaq (%rax,%r15,1),%rax
- movq %r9,%r12
- movq 1280(%rsp),%rdi
- addq %r14,%rax
-
- leaq 1152(%rsp),%rbp
-
- addq 0(%rdi),%rax
- addq 8(%rdi),%rbx
- addq 16(%rdi),%rcx
- addq 24(%rdi),%rdx
- addq 32(%rdi),%r8
- addq 40(%rdi),%r9
- addq 48(%rdi),%r10
- addq 56(%rdi),%r11
-
- movq %rax,0(%rdi)
- movq %rbx,8(%rdi)
- movq %rcx,16(%rdi)
- movq %rdx,24(%rdi)
- movq %r8,32(%rdi)
- movq %r9,40(%rdi)
- movq %r10,48(%rdi)
- movq %r11,56(%rdi)
-
- cmpq 144(%rbp),%rsi
- je .Ldone_avx2
-
- xorq %r14,%r14
- movq %rbx,%rdi
- xorq %rcx,%rdi
- movq %r9,%r12
- jmp .Lower_avx2
-.align 16
-.Lower_avx2:
- addq 0+16(%rbp),%r11
- andq %r8,%r12
- rorxq $41,%r8,%r13
- rorxq $18,%r8,%r15
- leaq (%rax,%r14,1),%rax
- leaq (%r11,%r12,1),%r11
- andnq %r10,%r8,%r12
- xorq %r15,%r13
- rorxq $14,%r8,%r14
- leaq (%r11,%r12,1),%r11
- xorq %r14,%r13
- movq %rax,%r15
- rorxq $39,%rax,%r12
- leaq (%r11,%r13,1),%r11
- xorq %rbx,%r15
- rorxq $34,%rax,%r14
- rorxq $28,%rax,%r13
- leaq (%rdx,%r11,1),%rdx
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %rbx,%rdi
- xorq %r13,%r14
- leaq (%r11,%rdi,1),%r11
- movq %r8,%r12
- addq 8+16(%rbp),%r10
- andq %rdx,%r12
- rorxq $41,%rdx,%r13
- rorxq $18,%rdx,%rdi
- leaq (%r11,%r14,1),%r11
- leaq (%r10,%r12,1),%r10
- andnq %r9,%rdx,%r12
- xorq %rdi,%r13
- rorxq $14,%rdx,%r14
- leaq (%r10,%r12,1),%r10
- xorq %r14,%r13
- movq %r11,%rdi
- rorxq $39,%r11,%r12
- leaq (%r10,%r13,1),%r10
- xorq %rax,%rdi
- rorxq $34,%r11,%r14
- rorxq $28,%r11,%r13
- leaq (%rcx,%r10,1),%rcx
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %rax,%r15
- xorq %r13,%r14
- leaq (%r10,%r15,1),%r10
- movq %rdx,%r12
- addq 32+16(%rbp),%r9
- andq %rcx,%r12
- rorxq $41,%rcx,%r13
- rorxq $18,%rcx,%r15
- leaq (%r10,%r14,1),%r10
- leaq (%r9,%r12,1),%r9
- andnq %r8,%rcx,%r12
- xorq %r15,%r13
- rorxq $14,%rcx,%r14
- leaq (%r9,%r12,1),%r9
- xorq %r14,%r13
- movq %r10,%r15
- rorxq $39,%r10,%r12
- leaq (%r9,%r13,1),%r9
- xorq %r11,%r15
- rorxq $34,%r10,%r14
- rorxq $28,%r10,%r13
- leaq (%rbx,%r9,1),%rbx
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %r11,%rdi
- xorq %r13,%r14
- leaq (%r9,%rdi,1),%r9
- movq %rcx,%r12
- addq 40+16(%rbp),%r8
- andq %rbx,%r12
- rorxq $41,%rbx,%r13
- rorxq $18,%rbx,%rdi
- leaq (%r9,%r14,1),%r9
- leaq (%r8,%r12,1),%r8
- andnq %rdx,%rbx,%r12
- xorq %rdi,%r13
- rorxq $14,%rbx,%r14
- leaq (%r8,%r12,1),%r8
- xorq %r14,%r13
- movq %r9,%rdi
- rorxq $39,%r9,%r12
- leaq (%r8,%r13,1),%r8
- xorq %r10,%rdi
- rorxq $34,%r9,%r14
- rorxq $28,%r9,%r13
- leaq (%rax,%r8,1),%rax
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %r10,%r15
- xorq %r13,%r14
- leaq (%r8,%r15,1),%r8
- movq %rbx,%r12
- addq 64+16(%rbp),%rdx
- andq %rax,%r12
- rorxq $41,%rax,%r13
- rorxq $18,%rax,%r15
- leaq (%r8,%r14,1),%r8
- leaq (%rdx,%r12,1),%rdx
- andnq %rcx,%rax,%r12
- xorq %r15,%r13
- rorxq $14,%rax,%r14
- leaq (%rdx,%r12,1),%rdx
- xorq %r14,%r13
- movq %r8,%r15
- rorxq $39,%r8,%r12
- leaq (%rdx,%r13,1),%rdx
- xorq %r9,%r15
- rorxq $34,%r8,%r14
- rorxq $28,%r8,%r13
- leaq (%r11,%rdx,1),%r11
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %r9,%rdi
- xorq %r13,%r14
- leaq (%rdx,%rdi,1),%rdx
- movq %rax,%r12
- addq 72+16(%rbp),%rcx
- andq %r11,%r12
- rorxq $41,%r11,%r13
- rorxq $18,%r11,%rdi
- leaq (%rdx,%r14,1),%rdx
- leaq (%rcx,%r12,1),%rcx
- andnq %rbx,%r11,%r12
- xorq %rdi,%r13
- rorxq $14,%r11,%r14
- leaq (%rcx,%r12,1),%rcx
- xorq %r14,%r13
- movq %rdx,%rdi
- rorxq $39,%rdx,%r12
- leaq (%rcx,%r13,1),%rcx
- xorq %r8,%rdi
- rorxq $34,%rdx,%r14
- rorxq $28,%rdx,%r13
- leaq (%r10,%rcx,1),%r10
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %r8,%r15
- xorq %r13,%r14
- leaq (%rcx,%r15,1),%rcx
- movq %r11,%r12
- addq 96+16(%rbp),%rbx
- andq %r10,%r12
- rorxq $41,%r10,%r13
- rorxq $18,%r10,%r15
- leaq (%rcx,%r14,1),%rcx
- leaq (%rbx,%r12,1),%rbx
- andnq %rax,%r10,%r12
- xorq %r15,%r13
- rorxq $14,%r10,%r14
- leaq (%rbx,%r12,1),%rbx
- xorq %r14,%r13
- movq %rcx,%r15
- rorxq $39,%rcx,%r12
- leaq (%rbx,%r13,1),%rbx
- xorq %rdx,%r15
- rorxq $34,%rcx,%r14
- rorxq $28,%rcx,%r13
- leaq (%r9,%rbx,1),%r9
- andq %r15,%rdi
- xorq %r12,%r14
- xorq %rdx,%rdi
- xorq %r13,%r14
- leaq (%rbx,%rdi,1),%rbx
- movq %r10,%r12
- addq 104+16(%rbp),%rax
- andq %r9,%r12
- rorxq $41,%r9,%r13
- rorxq $18,%r9,%rdi
- leaq (%rbx,%r14,1),%rbx
- leaq (%rax,%r12,1),%rax
- andnq %r11,%r9,%r12
- xorq %rdi,%r13
- rorxq $14,%r9,%r14
- leaq (%rax,%r12,1),%rax
- xorq %r14,%r13
- movq %rbx,%rdi
- rorxq $39,%rbx,%r12
- leaq (%rax,%r13,1),%rax
- xorq %rcx,%rdi
- rorxq $34,%rbx,%r14
- rorxq $28,%rbx,%r13
- leaq (%r8,%rax,1),%r8
- andq %rdi,%r15
- xorq %r12,%r14
- xorq %rcx,%r15
- xorq %r13,%r14
- leaq (%rax,%r15,1),%rax
- movq %r9,%r12
- leaq -128(%rbp),%rbp
- cmpq %rsp,%rbp
- jae .Lower_avx2
-
- movq 1280(%rsp),%rdi
- addq %r14,%rax
-
- leaq 1152(%rsp),%rsp
-
- addq 0(%rdi),%rax
- addq 8(%rdi),%rbx
- addq 16(%rdi),%rcx
- addq 24(%rdi),%rdx
- addq 32(%rdi),%r8
- addq 40(%rdi),%r9
- leaq 256(%rsi),%rsi
- addq 48(%rdi),%r10
- movq %rsi,%r12
- addq 56(%rdi),%r11
- cmpq 128+16(%rsp),%rsi
-
- movq %rax,0(%rdi)
- cmoveq %rsp,%r12
- movq %rbx,8(%rdi)
- movq %rcx,16(%rdi)
- movq %rdx,24(%rdi)
- movq %r8,32(%rdi)
- movq %r9,40(%rdi)
- movq %r10,48(%rdi)
- movq %r11,56(%rdi)
-
- jbe .Loop_avx2
- leaq (%rsp),%rbp
-
-.Ldone_avx2:
- leaq (%rbp),%rsp
- movq 152(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- vzeroupper
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lepilogue_avx2:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size sha512_block_data_order_avx2,.-sha512_block_data_order_avx2
diff --git a/secure/lib/libcrypto/amd64/x25519-x86_64.S b/secure/lib/libcrypto/amd64/x25519-x86_64.S
index 7786d365a695b..28063bf95b003 100644
--- a/secure/lib/libcrypto/amd64/x25519-x86_64.S
+++ b/secure/lib/libcrypto/amd64/x25519-x86_64.S
@@ -397,398 +397,32 @@ x25519_fe51_mul121666:
.Lfe51_mul121666_epilogue:
.cfi_endproc
.size x25519_fe51_mul121666,.-x25519_fe51_mul121666
-
.globl x25519_fe64_eligible
.type x25519_fe64_eligible,@function
.align 32
x25519_fe64_eligible:
- movl OPENSSL_ia32cap_P+8(%rip),%ecx
+.cfi_startproc
xorl %eax,%eax
- andl $0x80100,%ecx
- cmpl $0x80100,%ecx
- cmovel %ecx,%eax
.byte 0xf3,0xc3
+.cfi_endproc
.size x25519_fe64_eligible,.-x25519_fe64_eligible
.globl x25519_fe64_mul
.type x25519_fe64_mul,@function
-.align 32
-x25519_fe64_mul:
-.cfi_startproc
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-16
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
- pushq %rdi
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rdi,-64
- leaq -16(%rsp),%rsp
-.cfi_adjust_cfa_offset 16
-.Lfe64_mul_body:
-
- movq %rdx,%rax
- movq 0(%rdx),%rbp
- movq 0(%rsi),%rdx
- movq 8(%rax),%rcx
- movq 16(%rax),%r14
- movq 24(%rax),%r15
-
- mulxq %rbp,%r8,%rax
- xorl %edi,%edi
- mulxq %rcx,%r9,%rbx
- adcxq %rax,%r9
- mulxq %r14,%r10,%rax
- adcxq %rbx,%r10
- mulxq %r15,%r11,%r12
- movq 8(%rsi),%rdx
- adcxq %rax,%r11
- movq %r14,(%rsp)
- adcxq %rdi,%r12
-
- mulxq %rbp,%rax,%rbx
- adoxq %rax,%r9
- adcxq %rbx,%r10
- mulxq %rcx,%rax,%rbx
- adoxq %rax,%r10
- adcxq %rbx,%r11
- mulxq %r14,%rax,%rbx
- adoxq %rax,%r11
- adcxq %rbx,%r12
- mulxq %r15,%rax,%r13
- movq 16(%rsi),%rdx
- adoxq %rax,%r12
- adcxq %rdi,%r13
- adoxq %rdi,%r13
-
- mulxq %rbp,%rax,%rbx
- adcxq %rax,%r10
- adoxq %rbx,%r11
- mulxq %rcx,%rax,%rbx
- adcxq %rax,%r11
- adoxq %rbx,%r12
- mulxq %r14,%rax,%rbx
- adcxq %rax,%r12
- adoxq %rbx,%r13
- mulxq %r15,%rax,%r14
- movq 24(%rsi),%rdx
- adcxq %rax,%r13
- adoxq %rdi,%r14
- adcxq %rdi,%r14
-
- mulxq %rbp,%rax,%rbx
- adoxq %rax,%r11
- adcxq %rbx,%r12
- mulxq %rcx,%rax,%rbx
- adoxq %rax,%r12
- adcxq %rbx,%r13
- mulxq (%rsp),%rax,%rbx
- adoxq %rax,%r13
- adcxq %rbx,%r14
- mulxq %r15,%rax,%r15
- movl $38,%edx
- adoxq %rax,%r14
- adcxq %rdi,%r15
- adoxq %rdi,%r15
-
- jmp .Lreduce64
-.Lfe64_mul_epilogue:
-.cfi_endproc
-.size x25519_fe64_mul,.-x25519_fe64_mul
-
.globl x25519_fe64_sqr
-.type x25519_fe64_sqr,@function
-.align 32
-x25519_fe64_sqr:
-.cfi_startproc
- pushq %rbp
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbp,-16
- pushq %rbx
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rbx,-24
- pushq %r12
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_adjust_cfa_offset 8
-.cfi_offset %r15,-56
- pushq %rdi
-.cfi_adjust_cfa_offset 8
-.cfi_offset %rdi,-64
- leaq -16(%rsp),%rsp
-.cfi_adjust_cfa_offset 16
-.Lfe64_sqr_body:
-
- movq 0(%rsi),%rdx
- movq 8(%rsi),%rcx
- movq 16(%rsi),%rbp
- movq 24(%rsi),%rsi
-
-
- mulxq %rdx,%r8,%r15
- mulxq %rcx,%r9,%rax
- xorl %edi,%edi
- mulxq %rbp,%r10,%rbx
- adcxq %rax,%r10
- mulxq %rsi,%r11,%r12
- movq %rcx,%rdx
- adcxq %rbx,%r11
- adcxq %rdi,%r12
-
-
- mulxq %rbp,%rax,%rbx
- adoxq %rax,%r11
- adcxq %rbx,%r12
- mulxq %rsi,%rax,%r13
- movq %rbp,%rdx
- adoxq %rax,%r12
- adcxq %rdi,%r13
-
-
- mulxq %rsi,%rax,%r14
- movq %rcx,%rdx
- adoxq %rax,%r13
- adcxq %rdi,%r14
- adoxq %rdi,%r14
-
- adcxq %r9,%r9
- adoxq %r15,%r9
- adcxq %r10,%r10
- mulxq %rdx,%rax,%rbx
- movq %rbp,%rdx
- adcxq %r11,%r11
- adoxq %rax,%r10
- adcxq %r12,%r12
- adoxq %rbx,%r11
- mulxq %rdx,%rax,%rbx
- movq %rsi,%rdx
- adcxq %r13,%r13
- adoxq %rax,%r12
- adcxq %r14,%r14
- adoxq %rbx,%r13
- mulxq %rdx,%rax,%r15
- movl $38,%edx
- adoxq %rax,%r14
- adcxq %rdi,%r15
- adoxq %rdi,%r15
- jmp .Lreduce64
-
-.align 32
-.Lreduce64:
- mulxq %r12,%rax,%rbx
- adcxq %rax,%r8
- adoxq %rbx,%r9
- mulxq %r13,%rax,%rbx
- adcxq %rax,%r9
- adoxq %rbx,%r10
- mulxq %r14,%rax,%rbx
- adcxq %rax,%r10
- adoxq %rbx,%r11
- mulxq %r15,%rax,%r12
- adcxq %rax,%r11
- adoxq %rdi,%r12
- adcxq %rdi,%r12
-
- movq 16(%rsp),%rdi
- imulq %rdx,%r12
-
- addq %r12,%r8
- adcq $0,%r9
- adcq $0,%r10
- adcq $0,%r11
-
- sbbq %rax,%rax
- andq $38,%rax
-
- addq %rax,%r8
- movq %r9,8(%rdi)
- movq %r10,16(%rdi)
- movq %r11,24(%rdi)
- movq %r8,0(%rdi)
-
- movq 24(%rsp),%r15
-.cfi_restore %r15
- movq 32(%rsp),%r14
-.cfi_restore %r14
- movq 40(%rsp),%r13
-.cfi_restore %r13
- movq 48(%rsp),%r12
-.cfi_restore %r12
- movq 56(%rsp),%rbx
-.cfi_restore %rbx
- movq 64(%rsp),%rbp
-.cfi_restore %rbp
- leaq 72(%rsp),%rsp
-.cfi_adjust_cfa_offset 88
-.Lfe64_sqr_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size x25519_fe64_sqr,.-x25519_fe64_sqr
-
.globl x25519_fe64_mul121666
-.type x25519_fe64_mul121666,@function
-.align 32
-x25519_fe64_mul121666:
-.Lfe64_mul121666_body:
- movl $121666,%edx
- mulxq 0(%rsi),%r8,%rcx
- mulxq 8(%rsi),%r9,%rax
- addq %rcx,%r9
- mulxq 16(%rsi),%r10,%rcx
- adcq %rax,%r10
- mulxq 24(%rsi),%r11,%rax
- adcq %rcx,%r11
- adcq $0,%rax
-
- imulq $38,%rax,%rax
-
- addq %rax,%r8
- adcq $0,%r9
- adcq $0,%r10
- adcq $0,%r11
-
- sbbq %rax,%rax
- andq $38,%rax
-
- addq %rax,%r8
- movq %r9,8(%rdi)
- movq %r10,16(%rdi)
- movq %r11,24(%rdi)
- movq %r8,0(%rdi)
-
-.Lfe64_mul121666_epilogue:
- .byte 0xf3,0xc3
-.size x25519_fe64_mul121666,.-x25519_fe64_mul121666
-
.globl x25519_fe64_add
-.type x25519_fe64_add,@function
-.align 32
-x25519_fe64_add:
-.Lfe64_add_body:
- movq 0(%rsi),%r8
- movq 8(%rsi),%r9
- movq 16(%rsi),%r10
- movq 24(%rsi),%r11
-
- addq 0(%rdx),%r8
- adcq 8(%rdx),%r9
- adcq 16(%rdx),%r10
- adcq 24(%rdx),%r11
-
- sbbq %rax,%rax
- andq $38,%rax
-
- addq %rax,%r8
- adcq $0,%r9
- adcq $0,%r10
- movq %r9,8(%rdi)
- adcq $0,%r11
- movq %r10,16(%rdi)
- sbbq %rax,%rax
- movq %r11,24(%rdi)
- andq $38,%rax
-
- addq %rax,%r8
- movq %r8,0(%rdi)
-
-.Lfe64_add_epilogue:
- .byte 0xf3,0xc3
-.size x25519_fe64_add,.-x25519_fe64_add
-
.globl x25519_fe64_sub
-.type x25519_fe64_sub,@function
-.align 32
-x25519_fe64_sub:
-.Lfe64_sub_body:
- movq 0(%rsi),%r8
- movq 8(%rsi),%r9
- movq 16(%rsi),%r10
- movq 24(%rsi),%r11
-
- subq 0(%rdx),%r8
- sbbq 8(%rdx),%r9
- sbbq 16(%rdx),%r10
- sbbq 24(%rdx),%r11
-
- sbbq %rax,%rax
- andq $38,%rax
-
- subq %rax,%r8
- sbbq $0,%r9
- sbbq $0,%r10
- movq %r9,8(%rdi)
- sbbq $0,%r11
- movq %r10,16(%rdi)
- sbbq %rax,%rax
- movq %r11,24(%rdi)
- andq $38,%rax
-
- subq %rax,%r8
- movq %r8,0(%rdi)
-
-.Lfe64_sub_epilogue:
- .byte 0xf3,0xc3
-.size x25519_fe64_sub,.-x25519_fe64_sub
-
.globl x25519_fe64_tobytes
-.type x25519_fe64_tobytes,@function
-.align 32
+x25519_fe64_mul:
+x25519_fe64_sqr:
+x25519_fe64_mul121666:
+x25519_fe64_add:
+x25519_fe64_sub:
x25519_fe64_tobytes:
-.Lfe64_to_body:
- movq 0(%rsi),%r8
- movq 8(%rsi),%r9
- movq 16(%rsi),%r10
- movq 24(%rsi),%r11
-
-
- leaq (%r11,%r11,1),%rax
- sarq $63,%r11
- shrq $1,%rax
- andq $19,%r11
- addq $19,%r11
-
- addq %r11,%r8
- adcq $0,%r9
- adcq $0,%r10
- adcq $0,%rax
-
- leaq (%rax,%rax,1),%r11
- sarq $63,%rax
- shrq $1,%r11
- notq %rax
- andq $19,%rax
-
- subq %rax,%r8
- sbbq $0,%r9
- sbbq $0,%r10
- sbbq $0,%r11
-
- movq %r8,0(%rdi)
- movq %r9,8(%rdi)
- movq %r10,16(%rdi)
- movq %r11,24(%rdi)
-
-.Lfe64_to_epilogue:
+.cfi_startproc
+.byte 0x0f,0x0b
.byte 0xf3,0xc3
-.size x25519_fe64_tobytes,.-x25519_fe64_tobytes
+.cfi_endproc
+.size x25519_fe64_mul,.-x25519_fe64_mul
.byte 88,50,53,53,49,57,32,112,114,105,109,105,116,105,118,101,115,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
diff --git a/secure/lib/libcrypto/amd64/x86_64-mont.S b/secure/lib/libcrypto/amd64/x86_64-mont.S
index 015a87c446b70..2fd4d2f460060 100644
--- a/secure/lib/libcrypto/amd64/x86_64-mont.S
+++ b/secure/lib/libcrypto/amd64/x86_64-mont.S
@@ -16,7 +16,6 @@ bn_mul_mont:
jnz .Lmul_enter
cmpl $8,%r9d
jb .Lmul_enter
- movl OPENSSL_ia32cap_P+8(%rip),%r11d
cmpq %rsi,%rdx
jne .Lmul4x_enter
testl $7,%r9d
@@ -265,9 +264,6 @@ bn_mul4x_mont:
movq %rsp,%rax
.cfi_def_cfa_register %rax
.Lmul4x_enter:
- andl $0x80100,%r11d
- cmpl $0x80100,%r11d
- je .Lmulx4x_enter
pushq %rbx
.cfi_offset %rbx,-16
pushq %rbp
@@ -693,7 +689,6 @@ bn_mul4x_mont:
.size bn_mul4x_mont,.-bn_mul4x_mont
-
.type bn_sqr8x_mont,@function
.align 32
bn_sqr8x_mont:
@@ -775,25 +770,6 @@ bn_sqr8x_mont:
pxor %xmm0,%xmm0
.byte 102,72,15,110,207
.byte 102,73,15,110,218
- movl OPENSSL_ia32cap_P+8(%rip),%eax
- andl $0x80100,%eax
- cmpl $0x80100,%eax
- jne .Lsqr8x_nox
-
- call bn_sqrx8x_internal
-
-
-
-
- leaq (%r8,%rcx,1),%rbx
- movq %rcx,%r9
- movq %rcx,%rdx
-.byte 102,72,15,126,207
- sarq $3+2,%rcx
- jmp .Lsqr8x_sub
-
-.align 32
-.Lsqr8x_nox:
call bn_sqr8x_internal
@@ -881,361 +857,5 @@ bn_sqr8x_mont:
.byte 0xf3,0xc3
.cfi_endproc
.size bn_sqr8x_mont,.-bn_sqr8x_mont
-.type bn_mulx4x_mont,@function
-.align 32
-bn_mulx4x_mont:
-.cfi_startproc
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
-.Lmulx4x_enter:
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
-.Lmulx4x_prologue:
-
- shll $3,%r9d
- xorq %r10,%r10
- subq %r9,%r10
- movq (%r8),%r8
- leaq -72(%rsp,%r10,1),%rbp
- andq $-128,%rbp
- movq %rsp,%r11
- subq %rbp,%r11
- andq $-4096,%r11
- leaq (%r11,%rbp,1),%rsp
- movq (%rsp),%r10
- cmpq %rbp,%rsp
- ja .Lmulx4x_page_walk
- jmp .Lmulx4x_page_walk_done
-
-.align 16
-.Lmulx4x_page_walk:
- leaq -4096(%rsp),%rsp
- movq (%rsp),%r10
- cmpq %rbp,%rsp
- ja .Lmulx4x_page_walk
-.Lmulx4x_page_walk_done:
-
- leaq (%rdx,%r9,1),%r10
-
-
-
-
-
-
-
-
-
-
-
-
- movq %r9,0(%rsp)
- shrq $5,%r9
- movq %r10,16(%rsp)
- subq $1,%r9
- movq %r8,24(%rsp)
- movq %rdi,32(%rsp)
- movq %rax,40(%rsp)
-.cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08
- movq %r9,48(%rsp)
- jmp .Lmulx4x_body
-
-.align 32
-.Lmulx4x_body:
- leaq 8(%rdx),%rdi
- movq (%rdx),%rdx
- leaq 64+32(%rsp),%rbx
- movq %rdx,%r9
-
- mulxq 0(%rsi),%r8,%rax
- mulxq 8(%rsi),%r11,%r14
- addq %rax,%r11
- movq %rdi,8(%rsp)
- mulxq 16(%rsi),%r12,%r13
- adcq %r14,%r12
- adcq $0,%r13
-
- movq %r8,%rdi
- imulq 24(%rsp),%r8
- xorq %rbp,%rbp
-
- mulxq 24(%rsi),%rax,%r14
- movq %r8,%rdx
- leaq 32(%rsi),%rsi
- adcxq %rax,%r13
- adcxq %rbp,%r14
-
- mulxq 0(%rcx),%rax,%r10
- adcxq %rax,%rdi
- adoxq %r11,%r10
- mulxq 8(%rcx),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
-.byte 0xc4,0x62,0xfb,0xf6,0xa1,0x10,0x00,0x00,0x00
- movq 48(%rsp),%rdi
- movq %r10,-32(%rbx)
- adcxq %rax,%r11
- adoxq %r13,%r12
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq %r11,-24(%rbx)
- adcxq %rax,%r12
- adoxq %rbp,%r15
- leaq 32(%rcx),%rcx
- movq %r12,-16(%rbx)
-
- jmp .Lmulx4x_1st
-
-.align 32
-.Lmulx4x_1st:
- adcxq %rbp,%r15
- mulxq 0(%rsi),%r10,%rax
- adcxq %r14,%r10
- mulxq 8(%rsi),%r11,%r14
- adcxq %rax,%r11
- mulxq 16(%rsi),%r12,%rax
- adcxq %r14,%r12
- mulxq 24(%rsi),%r13,%r14
-.byte 0x67,0x67
- movq %r8,%rdx
- adcxq %rax,%r13
- adcxq %rbp,%r14
- leaq 32(%rsi),%rsi
- leaq 32(%rbx),%rbx
-
- adoxq %r15,%r10
- mulxq 0(%rcx),%rax,%r15
- adcxq %rax,%r10
- adoxq %r15,%r11
- mulxq 8(%rcx),%rax,%r15
- adcxq %rax,%r11
- adoxq %r15,%r12
- mulxq 16(%rcx),%rax,%r15
- movq %r10,-40(%rbx)
- adcxq %rax,%r12
- movq %r11,-32(%rbx)
- adoxq %r15,%r13
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq %r12,-24(%rbx)
- adcxq %rax,%r13
- adoxq %rbp,%r15
- leaq 32(%rcx),%rcx
- movq %r13,-16(%rbx)
-
- decq %rdi
- jnz .Lmulx4x_1st
-
- movq 0(%rsp),%rax
- movq 8(%rsp),%rdi
- adcq %rbp,%r15
- addq %r15,%r14
- sbbq %r15,%r15
- movq %r14,-8(%rbx)
- jmp .Lmulx4x_outer
-
-.align 32
-.Lmulx4x_outer:
- movq (%rdi),%rdx
- leaq 8(%rdi),%rdi
- subq %rax,%rsi
- movq %r15,(%rbx)
- leaq 64+32(%rsp),%rbx
- subq %rax,%rcx
-
- mulxq 0(%rsi),%r8,%r11
- xorl %ebp,%ebp
- movq %rdx,%r9
- mulxq 8(%rsi),%r14,%r12
- adoxq -32(%rbx),%r8
- adcxq %r14,%r11
- mulxq 16(%rsi),%r15,%r13
- adoxq -24(%rbx),%r11
- adcxq %r15,%r12
- adoxq -16(%rbx),%r12
- adcxq %rbp,%r13
- adoxq %rbp,%r13
-
- movq %rdi,8(%rsp)
- movq %r8,%r15
- imulq 24(%rsp),%r8
- xorl %ebp,%ebp
-
- mulxq 24(%rsi),%rax,%r14
- movq %r8,%rdx
- adcxq %rax,%r13
- adoxq -8(%rbx),%r13
- adcxq %rbp,%r14
- leaq 32(%rsi),%rsi
- adoxq %rbp,%r14
-
- mulxq 0(%rcx),%rax,%r10
- adcxq %rax,%r15
- adoxq %r11,%r10
- mulxq 8(%rcx),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
- mulxq 16(%rcx),%rax,%r12
- movq %r10,-32(%rbx)
- adcxq %rax,%r11
- adoxq %r13,%r12
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq %r11,-24(%rbx)
- leaq 32(%rcx),%rcx
- adcxq %rax,%r12
- adoxq %rbp,%r15
- movq 48(%rsp),%rdi
- movq %r12,-16(%rbx)
-
- jmp .Lmulx4x_inner
-
-.align 32
-.Lmulx4x_inner:
- mulxq 0(%rsi),%r10,%rax
- adcxq %rbp,%r15
- adoxq %r14,%r10
- mulxq 8(%rsi),%r11,%r14
- adcxq 0(%rbx),%r10
- adoxq %rax,%r11
- mulxq 16(%rsi),%r12,%rax
- adcxq 8(%rbx),%r11
- adoxq %r14,%r12
- mulxq 24(%rsi),%r13,%r14
- movq %r8,%rdx
- adcxq 16(%rbx),%r12
- adoxq %rax,%r13
- adcxq 24(%rbx),%r13
- adoxq %rbp,%r14
- leaq 32(%rsi),%rsi
- leaq 32(%rbx),%rbx
- adcxq %rbp,%r14
-
- adoxq %r15,%r10
- mulxq 0(%rcx),%rax,%r15
- adcxq %rax,%r10
- adoxq %r15,%r11
- mulxq 8(%rcx),%rax,%r15
- adcxq %rax,%r11
- adoxq %r15,%r12
- mulxq 16(%rcx),%rax,%r15
- movq %r10,-40(%rbx)
- adcxq %rax,%r12
- adoxq %r15,%r13
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq %r11,-32(%rbx)
- movq %r12,-24(%rbx)
- adcxq %rax,%r13
- adoxq %rbp,%r15
- leaq 32(%rcx),%rcx
- movq %r13,-16(%rbx)
-
- decq %rdi
- jnz .Lmulx4x_inner
-
- movq 0(%rsp),%rax
- movq 8(%rsp),%rdi
- adcq %rbp,%r15
- subq 0(%rbx),%rbp
- adcq %r15,%r14
- sbbq %r15,%r15
- movq %r14,-8(%rbx)
-
- cmpq 16(%rsp),%rdi
- jne .Lmulx4x_outer
-
- leaq 64(%rsp),%rbx
- subq %rax,%rcx
- negq %r15
- movq %rax,%rdx
- shrq $3+2,%rax
- movq 32(%rsp),%rdi
- jmp .Lmulx4x_sub
-
-.align 32
-.Lmulx4x_sub:
- movq 0(%rbx),%r11
- movq 8(%rbx),%r12
- movq 16(%rbx),%r13
- movq 24(%rbx),%r14
- leaq 32(%rbx),%rbx
- sbbq 0(%rcx),%r11
- sbbq 8(%rcx),%r12
- sbbq 16(%rcx),%r13
- sbbq 24(%rcx),%r14
- leaq 32(%rcx),%rcx
- movq %r11,0(%rdi)
- movq %r12,8(%rdi)
- movq %r13,16(%rdi)
- movq %r14,24(%rdi)
- leaq 32(%rdi),%rdi
- decq %rax
- jnz .Lmulx4x_sub
-
- sbbq $0,%r15
- leaq 64(%rsp),%rbx
- subq %rdx,%rdi
-
-.byte 102,73,15,110,207
- pxor %xmm0,%xmm0
- pshufd $0,%xmm1,%xmm1
- movq 40(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- jmp .Lmulx4x_cond_copy
-
-.align 32
-.Lmulx4x_cond_copy:
- movdqa 0(%rbx),%xmm2
- movdqa 16(%rbx),%xmm3
- leaq 32(%rbx),%rbx
- movdqu 0(%rdi),%xmm4
- movdqu 16(%rdi),%xmm5
- leaq 32(%rdi),%rdi
- movdqa %xmm0,-32(%rbx)
- movdqa %xmm0,-16(%rbx)
- pcmpeqd %xmm1,%xmm0
- pand %xmm1,%xmm2
- pand %xmm1,%xmm3
- pand %xmm0,%xmm4
- pand %xmm0,%xmm5
- pxor %xmm0,%xmm0
- por %xmm2,%xmm4
- por %xmm3,%xmm5
- movdqu %xmm4,-32(%rdi)
- movdqu %xmm5,-16(%rdi)
- subq $32,%rdx
- jnz .Lmulx4x_cond_copy
-
- movq %rdx,(%rbx)
-
- movq $1,%rax
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lmulx4x_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size bn_mulx4x_mont,.-bn_mulx4x_mont
.byte 77,111,110,116,103,111,109,101,114,121,32,77,117,108,116,105,112,108,105,99,97,116,105,111,110,32,102,111,114,32,120,56,54,95,54,52,44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0
.align 16
diff --git a/secure/lib/libcrypto/amd64/x86_64-mont5.S b/secure/lib/libcrypto/amd64/x86_64-mont5.S
index 85686f2aa4a29..b69366fa905d0 100644
--- a/secure/lib/libcrypto/amd64/x86_64-mont5.S
+++ b/secure/lib/libcrypto/amd64/x86_64-mont5.S
@@ -14,7 +14,6 @@ bn_mul_mont_gather5:
.cfi_def_cfa_register %rax
testl $7,%r9d
jnz .Lmul_enter
- movl OPENSSL_ia32cap_P+8(%rip),%r11d
jmp .Lmul4x_enter
.align 16
@@ -451,9 +450,6 @@ bn_mul4x_mont_gather5:
movq %rsp,%rax
.cfi_def_cfa_register %rax
.Lmul4x_enter:
- andl $0x80108,%r11d
- cmpl $0x80108,%r11d
- je .Lmulx4x_enter
pushq %rbx
.cfi_offset %rbx,-16
pushq %rbp
@@ -552,6 +548,7 @@ bn_mul4x_mont_gather5:
.type mul4x_internal,@function
.align 32
mul4x_internal:
+.cfi_startproc
shlq $5,%r9
movd 8(%rax),%xmm5
leaq .Linc(%rip),%rax
@@ -1073,6 +1070,7 @@ mul4x_internal:
movq 16(%rbp),%r14
movq 24(%rbp),%r15
jmp .Lsqr4x_sub_entry
+.cfi_endproc
.size mul4x_internal,.-mul4x_internal
.globl bn_power5
.type bn_power5,@function
@@ -1081,10 +1079,6 @@ bn_power5:
.cfi_startproc
movq %rsp,%rax
.cfi_def_cfa_register %rax
- movl OPENSSL_ia32cap_P+8(%rip),%r11d
- andl $0x80108,%r11d
- cmpl $0x80108,%r11d
- je .Lpowerx5_enter
pushq %rbx
.cfi_offset %rbx,-16
pushq %rbp
@@ -1215,6 +1209,7 @@ bn_power5:
.align 32
bn_sqr8x_internal:
__bn_sqr8x_internal:
+.cfi_startproc
@@ -1989,10 +1984,12 @@ __bn_sqr8x_reduction:
cmpq %rdx,%rdi
jb .L8x_reduction_loop
.byte 0xf3,0xc3
+.cfi_endproc
.size bn_sqr8x_internal,.-bn_sqr8x_internal
.type __bn_post4x_internal,@function
.align 32
__bn_post4x_internal:
+.cfi_startproc
movq 0(%rbp),%r12
leaq (%rdi,%r9,1),%rbx
movq %r9,%rcx
@@ -2043,15 +2040,18 @@ __bn_post4x_internal:
movq %r9,%r10
negq %r9
.byte 0xf3,0xc3
+.cfi_endproc
.size __bn_post4x_internal,.-__bn_post4x_internal
.globl bn_from_montgomery
.type bn_from_montgomery,@function
.align 32
bn_from_montgomery:
+.cfi_startproc
testl $7,%r9d
jz bn_from_mont8x
xorl %eax,%eax
.byte 0xf3,0xc3
+.cfi_endproc
.size bn_from_montgomery,.-bn_from_montgomery
.type bn_from_mont8x,@function
@@ -2168,21 +2168,6 @@ bn_from_mont8x:
.byte 0x67
movq %rcx,%rbp
.byte 102,73,15,110,218
- movl OPENSSL_ia32cap_P+8(%rip),%r11d
- andl $0x80108,%r11d
- cmpl $0x80108,%r11d
- jne .Lfrom_mont_nox
-
- leaq (%rax,%r9,1),%rdi
- call __bn_sqrx8x_reduction
- call __bn_postx4x_internal
-
- pxor %xmm0,%xmm0
- leaq 48(%rsp),%rax
- jmp .Lfrom_mont_zero
-
-.align 32
-.Lfrom_mont_nox:
call __bn_sqr8x_reduction
call __bn_post4x_internal
@@ -2221,1348 +2206,11 @@ bn_from_mont8x:
.byte 0xf3,0xc3
.cfi_endproc
.size bn_from_mont8x,.-bn_from_mont8x
-.type bn_mulx4x_mont_gather5,@function
-.align 32
-bn_mulx4x_mont_gather5:
-.cfi_startproc
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
-.Lmulx4x_enter:
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
-.Lmulx4x_prologue:
-
- shll $3,%r9d
- leaq (%r9,%r9,2),%r10
- negq %r9
- movq (%r8),%r8
-
-
-
-
-
-
-
-
-
-
- leaq -320(%rsp,%r9,2),%r11
- movq %rsp,%rbp
- subq %rdi,%r11
- andq $4095,%r11
- cmpq %r11,%r10
- jb .Lmulx4xsp_alt
- subq %r11,%rbp
- leaq -320(%rbp,%r9,2),%rbp
- jmp .Lmulx4xsp_done
-
-.Lmulx4xsp_alt:
- leaq 4096-320(,%r9,2),%r10
- leaq -320(%rbp,%r9,2),%rbp
- subq %r10,%r11
- movq $0,%r10
- cmovcq %r10,%r11
- subq %r11,%rbp
-.Lmulx4xsp_done:
- andq $-64,%rbp
- movq %rsp,%r11
- subq %rbp,%r11
- andq $-4096,%r11
- leaq (%r11,%rbp,1),%rsp
- movq (%rsp),%r10
- cmpq %rbp,%rsp
- ja .Lmulx4x_page_walk
- jmp .Lmulx4x_page_walk_done
-
-.Lmulx4x_page_walk:
- leaq -4096(%rsp),%rsp
- movq (%rsp),%r10
- cmpq %rbp,%rsp
- ja .Lmulx4x_page_walk
-.Lmulx4x_page_walk_done:
-
-
-
-
-
-
-
-
-
-
-
-
-
- movq %r8,32(%rsp)
- movq %rax,40(%rsp)
-.cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08
-.Lmulx4x_body:
- call mulx4x_internal
-
- movq 40(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- movq $1,%rax
-
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lmulx4x_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size bn_mulx4x_mont_gather5,.-bn_mulx4x_mont_gather5
-
-.type mulx4x_internal,@function
-.align 32
-mulx4x_internal:
- movq %r9,8(%rsp)
- movq %r9,%r10
- negq %r9
- shlq $5,%r9
- negq %r10
- leaq 128(%rdx,%r9,1),%r13
- shrq $5+5,%r9
- movd 8(%rax),%xmm5
- subq $1,%r9
- leaq .Linc(%rip),%rax
- movq %r13,16+8(%rsp)
- movq %r9,24+8(%rsp)
- movq %rdi,56+8(%rsp)
- movdqa 0(%rax),%xmm0
- movdqa 16(%rax),%xmm1
- leaq 88-112(%rsp,%r10,1),%r10
- leaq 128(%rdx),%rdi
-
- pshufd $0,%xmm5,%xmm5
- movdqa %xmm1,%xmm4
-.byte 0x67
- movdqa %xmm1,%xmm2
-.byte 0x67
- paddd %xmm0,%xmm1
- pcmpeqd %xmm5,%xmm0
- movdqa %xmm4,%xmm3
- paddd %xmm1,%xmm2
- pcmpeqd %xmm5,%xmm1
- movdqa %xmm0,112(%r10)
- movdqa %xmm4,%xmm0
-
- paddd %xmm2,%xmm3
- pcmpeqd %xmm5,%xmm2
- movdqa %xmm1,128(%r10)
- movdqa %xmm4,%xmm1
-
- paddd %xmm3,%xmm0
- pcmpeqd %xmm5,%xmm3
- movdqa %xmm2,144(%r10)
- movdqa %xmm4,%xmm2
-
- paddd %xmm0,%xmm1
- pcmpeqd %xmm5,%xmm0
- movdqa %xmm3,160(%r10)
- movdqa %xmm4,%xmm3
- paddd %xmm1,%xmm2
- pcmpeqd %xmm5,%xmm1
- movdqa %xmm0,176(%r10)
- movdqa %xmm4,%xmm0
-
- paddd %xmm2,%xmm3
- pcmpeqd %xmm5,%xmm2
- movdqa %xmm1,192(%r10)
- movdqa %xmm4,%xmm1
-
- paddd %xmm3,%xmm0
- pcmpeqd %xmm5,%xmm3
- movdqa %xmm2,208(%r10)
- movdqa %xmm4,%xmm2
-
- paddd %xmm0,%xmm1
- pcmpeqd %xmm5,%xmm0
- movdqa %xmm3,224(%r10)
- movdqa %xmm4,%xmm3
- paddd %xmm1,%xmm2
- pcmpeqd %xmm5,%xmm1
- movdqa %xmm0,240(%r10)
- movdqa %xmm4,%xmm0
-
- paddd %xmm2,%xmm3
- pcmpeqd %xmm5,%xmm2
- movdqa %xmm1,256(%r10)
- movdqa %xmm4,%xmm1
-
- paddd %xmm3,%xmm0
- pcmpeqd %xmm5,%xmm3
- movdqa %xmm2,272(%r10)
- movdqa %xmm4,%xmm2
-
- paddd %xmm0,%xmm1
- pcmpeqd %xmm5,%xmm0
- movdqa %xmm3,288(%r10)
- movdqa %xmm4,%xmm3
-.byte 0x67
- paddd %xmm1,%xmm2
- pcmpeqd %xmm5,%xmm1
- movdqa %xmm0,304(%r10)
-
- paddd %xmm2,%xmm3
- pcmpeqd %xmm5,%xmm2
- movdqa %xmm1,320(%r10)
-
- pcmpeqd %xmm5,%xmm3
- movdqa %xmm2,336(%r10)
-
- pand 64(%rdi),%xmm0
- pand 80(%rdi),%xmm1
- pand 96(%rdi),%xmm2
- movdqa %xmm3,352(%r10)
- pand 112(%rdi),%xmm3
- por %xmm2,%xmm0
- por %xmm3,%xmm1
- movdqa -128(%rdi),%xmm4
- movdqa -112(%rdi),%xmm5
- movdqa -96(%rdi),%xmm2
- pand 112(%r10),%xmm4
- movdqa -80(%rdi),%xmm3
- pand 128(%r10),%xmm5
- por %xmm4,%xmm0
- pand 144(%r10),%xmm2
- por %xmm5,%xmm1
- pand 160(%r10),%xmm3
- por %xmm2,%xmm0
- por %xmm3,%xmm1
- movdqa -64(%rdi),%xmm4
- movdqa -48(%rdi),%xmm5
- movdqa -32(%rdi),%xmm2
- pand 176(%r10),%xmm4
- movdqa -16(%rdi),%xmm3
- pand 192(%r10),%xmm5
- por %xmm4,%xmm0
- pand 208(%r10),%xmm2
- por %xmm5,%xmm1
- pand 224(%r10),%xmm3
- por %xmm2,%xmm0
- por %xmm3,%xmm1
- movdqa 0(%rdi),%xmm4
- movdqa 16(%rdi),%xmm5
- movdqa 32(%rdi),%xmm2
- pand 240(%r10),%xmm4
- movdqa 48(%rdi),%xmm3
- pand 256(%r10),%xmm5
- por %xmm4,%xmm0
- pand 272(%r10),%xmm2
- por %xmm5,%xmm1
- pand 288(%r10),%xmm3
- por %xmm2,%xmm0
- por %xmm3,%xmm1
- pxor %xmm1,%xmm0
- pshufd $0x4e,%xmm0,%xmm1
- por %xmm1,%xmm0
- leaq 256(%rdi),%rdi
-.byte 102,72,15,126,194
- leaq 64+32+8(%rsp),%rbx
-
- movq %rdx,%r9
- mulxq 0(%rsi),%r8,%rax
- mulxq 8(%rsi),%r11,%r12
- addq %rax,%r11
- mulxq 16(%rsi),%rax,%r13
- adcq %rax,%r12
- adcq $0,%r13
- mulxq 24(%rsi),%rax,%r14
-
- movq %r8,%r15
- imulq 32+8(%rsp),%r8
- xorq %rbp,%rbp
- movq %r8,%rdx
-
- movq %rdi,8+8(%rsp)
-
- leaq 32(%rsi),%rsi
- adcxq %rax,%r13
- adcxq %rbp,%r14
-
- mulxq 0(%rcx),%rax,%r10
- adcxq %rax,%r15
- adoxq %r11,%r10
- mulxq 8(%rcx),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
- mulxq 16(%rcx),%rax,%r12
- movq 24+8(%rsp),%rdi
- movq %r10,-32(%rbx)
- adcxq %rax,%r11
- adoxq %r13,%r12
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq %r11,-24(%rbx)
- adcxq %rax,%r12
- adoxq %rbp,%r15
- leaq 32(%rcx),%rcx
- movq %r12,-16(%rbx)
- jmp .Lmulx4x_1st
-
-.align 32
-.Lmulx4x_1st:
- adcxq %rbp,%r15
- mulxq 0(%rsi),%r10,%rax
- adcxq %r14,%r10
- mulxq 8(%rsi),%r11,%r14
- adcxq %rax,%r11
- mulxq 16(%rsi),%r12,%rax
- adcxq %r14,%r12
- mulxq 24(%rsi),%r13,%r14
-.byte 0x67,0x67
- movq %r8,%rdx
- adcxq %rax,%r13
- adcxq %rbp,%r14
- leaq 32(%rsi),%rsi
- leaq 32(%rbx),%rbx
-
- adoxq %r15,%r10
- mulxq 0(%rcx),%rax,%r15
- adcxq %rax,%r10
- adoxq %r15,%r11
- mulxq 8(%rcx),%rax,%r15
- adcxq %rax,%r11
- adoxq %r15,%r12
- mulxq 16(%rcx),%rax,%r15
- movq %r10,-40(%rbx)
- adcxq %rax,%r12
- movq %r11,-32(%rbx)
- adoxq %r15,%r13
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq %r12,-24(%rbx)
- adcxq %rax,%r13
- adoxq %rbp,%r15
- leaq 32(%rcx),%rcx
- movq %r13,-16(%rbx)
-
- decq %rdi
- jnz .Lmulx4x_1st
-
- movq 8(%rsp),%rax
- adcq %rbp,%r15
- leaq (%rsi,%rax,1),%rsi
- addq %r15,%r14
- movq 8+8(%rsp),%rdi
- adcq %rbp,%rbp
- movq %r14,-8(%rbx)
- jmp .Lmulx4x_outer
-
-.align 32
-.Lmulx4x_outer:
- leaq 16-256(%rbx),%r10
- pxor %xmm4,%xmm4
-.byte 0x67,0x67
- pxor %xmm5,%xmm5
- movdqa -128(%rdi),%xmm0
- movdqa -112(%rdi),%xmm1
- movdqa -96(%rdi),%xmm2
- pand 256(%r10),%xmm0
- movdqa -80(%rdi),%xmm3
- pand 272(%r10),%xmm1
- por %xmm0,%xmm4
- pand 288(%r10),%xmm2
- por %xmm1,%xmm5
- pand 304(%r10),%xmm3
- por %xmm2,%xmm4
- por %xmm3,%xmm5
- movdqa -64(%rdi),%xmm0
- movdqa -48(%rdi),%xmm1
- movdqa -32(%rdi),%xmm2
- pand 320(%r10),%xmm0
- movdqa -16(%rdi),%xmm3
- pand 336(%r10),%xmm1
- por %xmm0,%xmm4
- pand 352(%r10),%xmm2
- por %xmm1,%xmm5
- pand 368(%r10),%xmm3
- por %xmm2,%xmm4
- por %xmm3,%xmm5
- movdqa 0(%rdi),%xmm0
- movdqa 16(%rdi),%xmm1
- movdqa 32(%rdi),%xmm2
- pand 384(%r10),%xmm0
- movdqa 48(%rdi),%xmm3
- pand 400(%r10),%xmm1
- por %xmm0,%xmm4
- pand 416(%r10),%xmm2
- por %xmm1,%xmm5
- pand 432(%r10),%xmm3
- por %xmm2,%xmm4
- por %xmm3,%xmm5
- movdqa 64(%rdi),%xmm0
- movdqa 80(%rdi),%xmm1
- movdqa 96(%rdi),%xmm2
- pand 448(%r10),%xmm0
- movdqa 112(%rdi),%xmm3
- pand 464(%r10),%xmm1
- por %xmm0,%xmm4
- pand 480(%r10),%xmm2
- por %xmm1,%xmm5
- pand 496(%r10),%xmm3
- por %xmm2,%xmm4
- por %xmm3,%xmm5
- por %xmm5,%xmm4
- pshufd $0x4e,%xmm4,%xmm0
- por %xmm4,%xmm0
- leaq 256(%rdi),%rdi
-.byte 102,72,15,126,194
-
- movq %rbp,(%rbx)
- leaq 32(%rbx,%rax,1),%rbx
- mulxq 0(%rsi),%r8,%r11
- xorq %rbp,%rbp
- movq %rdx,%r9
- mulxq 8(%rsi),%r14,%r12
- adoxq -32(%rbx),%r8
- adcxq %r14,%r11
- mulxq 16(%rsi),%r15,%r13
- adoxq -24(%rbx),%r11
- adcxq %r15,%r12
- mulxq 24(%rsi),%rdx,%r14
- adoxq -16(%rbx),%r12
- adcxq %rdx,%r13
- leaq (%rcx,%rax,1),%rcx
- leaq 32(%rsi),%rsi
- adoxq -8(%rbx),%r13
- adcxq %rbp,%r14
- adoxq %rbp,%r14
-
- movq %r8,%r15
- imulq 32+8(%rsp),%r8
-
- movq %r8,%rdx
- xorq %rbp,%rbp
- movq %rdi,8+8(%rsp)
-
- mulxq 0(%rcx),%rax,%r10
- adcxq %rax,%r15
- adoxq %r11,%r10
- mulxq 8(%rcx),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
- mulxq 16(%rcx),%rax,%r12
- adcxq %rax,%r11
- adoxq %r13,%r12
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- movq 24+8(%rsp),%rdi
- movq %r10,-32(%rbx)
- adcxq %rax,%r12
- movq %r11,-24(%rbx)
- adoxq %rbp,%r15
- movq %r12,-16(%rbx)
- leaq 32(%rcx),%rcx
- jmp .Lmulx4x_inner
-
-.align 32
-.Lmulx4x_inner:
- mulxq 0(%rsi),%r10,%rax
- adcxq %rbp,%r15
- adoxq %r14,%r10
- mulxq 8(%rsi),%r11,%r14
- adcxq 0(%rbx),%r10
- adoxq %rax,%r11
- mulxq 16(%rsi),%r12,%rax
- adcxq 8(%rbx),%r11
- adoxq %r14,%r12
- mulxq 24(%rsi),%r13,%r14
- movq %r8,%rdx
- adcxq 16(%rbx),%r12
- adoxq %rax,%r13
- adcxq 24(%rbx),%r13
- adoxq %rbp,%r14
- leaq 32(%rsi),%rsi
- leaq 32(%rbx),%rbx
- adcxq %rbp,%r14
-
- adoxq %r15,%r10
- mulxq 0(%rcx),%rax,%r15
- adcxq %rax,%r10
- adoxq %r15,%r11
- mulxq 8(%rcx),%rax,%r15
- adcxq %rax,%r11
- adoxq %r15,%r12
- mulxq 16(%rcx),%rax,%r15
- movq %r10,-40(%rbx)
- adcxq %rax,%r12
- adoxq %r15,%r13
- movq %r11,-32(%rbx)
- mulxq 24(%rcx),%rax,%r15
- movq %r9,%rdx
- leaq 32(%rcx),%rcx
- movq %r12,-24(%rbx)
- adcxq %rax,%r13
- adoxq %rbp,%r15
- movq %r13,-16(%rbx)
-
- decq %rdi
- jnz .Lmulx4x_inner
-
- movq 0+8(%rsp),%rax
- adcq %rbp,%r15
- subq 0(%rbx),%rdi
- movq 8+8(%rsp),%rdi
- movq 16+8(%rsp),%r10
- adcq %r15,%r14
- leaq (%rsi,%rax,1),%rsi
- adcq %rbp,%rbp
- movq %r14,-8(%rbx)
-
- cmpq %r10,%rdi
- jb .Lmulx4x_outer
-
- movq -8(%rcx),%r10
- movq %rbp,%r8
- movq (%rcx,%rax,1),%r12
- leaq (%rcx,%rax,1),%rbp
- movq %rax,%rcx
- leaq (%rbx,%rax,1),%rdi
- xorl %eax,%eax
- xorq %r15,%r15
- subq %r14,%r10
- adcq %r15,%r15
- orq %r15,%r8
- sarq $3+2,%rcx
- subq %r8,%rax
- movq 56+8(%rsp),%rdx
- decq %r12
- movq 8(%rbp),%r13
- xorq %r8,%r8
- movq 16(%rbp),%r14
- movq 24(%rbp),%r15
- jmp .Lsqrx4x_sub_entry
-.size mulx4x_internal,.-mulx4x_internal
-.type bn_powerx5,@function
-.align 32
-bn_powerx5:
-.cfi_startproc
- movq %rsp,%rax
-.cfi_def_cfa_register %rax
-.Lpowerx5_enter:
- pushq %rbx
-.cfi_offset %rbx,-16
- pushq %rbp
-.cfi_offset %rbp,-24
- pushq %r12
-.cfi_offset %r12,-32
- pushq %r13
-.cfi_offset %r13,-40
- pushq %r14
-.cfi_offset %r14,-48
- pushq %r15
-.cfi_offset %r15,-56
-.Lpowerx5_prologue:
-
- shll $3,%r9d
- leaq (%r9,%r9,2),%r10
- negq %r9
- movq (%r8),%r8
-
-
-
-
-
-
-
-
- leaq -320(%rsp,%r9,2),%r11
- movq %rsp,%rbp
- subq %rdi,%r11
- andq $4095,%r11
- cmpq %r11,%r10
- jb .Lpwrx_sp_alt
- subq %r11,%rbp
- leaq -320(%rbp,%r9,2),%rbp
- jmp .Lpwrx_sp_done
-
-.align 32
-.Lpwrx_sp_alt:
- leaq 4096-320(,%r9,2),%r10
- leaq -320(%rbp,%r9,2),%rbp
- subq %r10,%r11
- movq $0,%r10
- cmovcq %r10,%r11
- subq %r11,%rbp
-.Lpwrx_sp_done:
- andq $-64,%rbp
- movq %rsp,%r11
- subq %rbp,%r11
- andq $-4096,%r11
- leaq (%r11,%rbp,1),%rsp
- movq (%rsp),%r10
- cmpq %rbp,%rsp
- ja .Lpwrx_page_walk
- jmp .Lpwrx_page_walk_done
-
-.Lpwrx_page_walk:
- leaq -4096(%rsp),%rsp
- movq (%rsp),%r10
- cmpq %rbp,%rsp
- ja .Lpwrx_page_walk
-.Lpwrx_page_walk_done:
-
- movq %r9,%r10
- negq %r9
-
-
-
-
-
-
-
-
-
-
-
-
- pxor %xmm0,%xmm0
-.byte 102,72,15,110,207
-.byte 102,72,15,110,209
-.byte 102,73,15,110,218
-.byte 102,72,15,110,226
- movq %r8,32(%rsp)
- movq %rax,40(%rsp)
-.cfi_escape 0x0f,0x05,0x77,0x28,0x06,0x23,0x08
-.Lpowerx5_body:
-
- call __bn_sqrx8x_internal
- call __bn_postx4x_internal
- call __bn_sqrx8x_internal
- call __bn_postx4x_internal
- call __bn_sqrx8x_internal
- call __bn_postx4x_internal
- call __bn_sqrx8x_internal
- call __bn_postx4x_internal
- call __bn_sqrx8x_internal
- call __bn_postx4x_internal
-
- movq %r10,%r9
- movq %rsi,%rdi
-.byte 102,72,15,126,209
-.byte 102,72,15,126,226
- movq 40(%rsp),%rax
-
- call mulx4x_internal
-
- movq 40(%rsp),%rsi
-.cfi_def_cfa %rsi,8
- movq $1,%rax
-
- movq -48(%rsi),%r15
-.cfi_restore %r15
- movq -40(%rsi),%r14
-.cfi_restore %r14
- movq -32(%rsi),%r13
-.cfi_restore %r13
- movq -24(%rsi),%r12
-.cfi_restore %r12
- movq -16(%rsi),%rbp
-.cfi_restore %rbp
- movq -8(%rsi),%rbx
-.cfi_restore %rbx
- leaq (%rsi),%rsp
-.cfi_def_cfa_register %rsp
-.Lpowerx5_epilogue:
- .byte 0xf3,0xc3
-.cfi_endproc
-.size bn_powerx5,.-bn_powerx5
-
-.globl bn_sqrx8x_internal
-.hidden bn_sqrx8x_internal
-.type bn_sqrx8x_internal,@function
-.align 32
-bn_sqrx8x_internal:
-__bn_sqrx8x_internal:
-.cfi_startproc
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
- leaq 48+8(%rsp),%rdi
- leaq (%rsi,%r9,1),%rbp
- movq %r9,0+8(%rsp)
- movq %rbp,8+8(%rsp)
- jmp .Lsqr8x_zero_start
-
-.align 32
-.byte 0x66,0x66,0x66,0x2e,0x0f,0x1f,0x84,0x00,0x00,0x00,0x00,0x00
-.Lsqrx8x_zero:
-.byte 0x3e
- movdqa %xmm0,0(%rdi)
- movdqa %xmm0,16(%rdi)
- movdqa %xmm0,32(%rdi)
- movdqa %xmm0,48(%rdi)
-.Lsqr8x_zero_start:
- movdqa %xmm0,64(%rdi)
- movdqa %xmm0,80(%rdi)
- movdqa %xmm0,96(%rdi)
- movdqa %xmm0,112(%rdi)
- leaq 128(%rdi),%rdi
- subq $64,%r9
- jnz .Lsqrx8x_zero
-
- movq 0(%rsi),%rdx
-
- xorq %r10,%r10
- xorq %r11,%r11
- xorq %r12,%r12
- xorq %r13,%r13
- xorq %r14,%r14
- xorq %r15,%r15
- leaq 48+8(%rsp),%rdi
- xorq %rbp,%rbp
- jmp .Lsqrx8x_outer_loop
-
-.align 32
-.Lsqrx8x_outer_loop:
- mulxq 8(%rsi),%r8,%rax
- adcxq %r9,%r8
- adoxq %rax,%r10
- mulxq 16(%rsi),%r9,%rax
- adcxq %r10,%r9
- adoxq %rax,%r11
-.byte 0xc4,0xe2,0xab,0xf6,0x86,0x18,0x00,0x00,0x00
- adcxq %r11,%r10
- adoxq %rax,%r12
-.byte 0xc4,0xe2,0xa3,0xf6,0x86,0x20,0x00,0x00,0x00
- adcxq %r12,%r11
- adoxq %rax,%r13
- mulxq 40(%rsi),%r12,%rax
- adcxq %r13,%r12
- adoxq %rax,%r14
- mulxq 48(%rsi),%r13,%rax
- adcxq %r14,%r13
- adoxq %r15,%rax
- mulxq 56(%rsi),%r14,%r15
- movq 8(%rsi),%rdx
- adcxq %rax,%r14
- adoxq %rbp,%r15
- adcq 64(%rdi),%r15
- movq %r8,8(%rdi)
- movq %r9,16(%rdi)
- sbbq %rcx,%rcx
- xorq %rbp,%rbp
-
-
- mulxq 16(%rsi),%r8,%rbx
- mulxq 24(%rsi),%r9,%rax
- adcxq %r10,%r8
- adoxq %rbx,%r9
- mulxq 32(%rsi),%r10,%rbx
- adcxq %r11,%r9
- adoxq %rax,%r10
-.byte 0xc4,0xe2,0xa3,0xf6,0x86,0x28,0x00,0x00,0x00
- adcxq %r12,%r10
- adoxq %rbx,%r11
-.byte 0xc4,0xe2,0x9b,0xf6,0x9e,0x30,0x00,0x00,0x00
- adcxq %r13,%r11
- adoxq %r14,%r12
-.byte 0xc4,0x62,0x93,0xf6,0xb6,0x38,0x00,0x00,0x00
- movq 16(%rsi),%rdx
- adcxq %rax,%r12
- adoxq %rbx,%r13
- adcxq %r15,%r13
- adoxq %rbp,%r14
- adcxq %rbp,%r14
-
- movq %r8,24(%rdi)
- movq %r9,32(%rdi)
-
- mulxq 24(%rsi),%r8,%rbx
- mulxq 32(%rsi),%r9,%rax
- adcxq %r10,%r8
- adoxq %rbx,%r9
- mulxq 40(%rsi),%r10,%rbx
- adcxq %r11,%r9
- adoxq %rax,%r10
-.byte 0xc4,0xe2,0xa3,0xf6,0x86,0x30,0x00,0x00,0x00
- adcxq %r12,%r10
- adoxq %r13,%r11
-.byte 0xc4,0x62,0x9b,0xf6,0xae,0x38,0x00,0x00,0x00
-.byte 0x3e
- movq 24(%rsi),%rdx
- adcxq %rbx,%r11
- adoxq %rax,%r12
- adcxq %r14,%r12
- movq %r8,40(%rdi)
- movq %r9,48(%rdi)
- mulxq 32(%rsi),%r8,%rax
- adoxq %rbp,%r13
- adcxq %rbp,%r13
-
- mulxq 40(%rsi),%r9,%rbx
- adcxq %r10,%r8
- adoxq %rax,%r9
- mulxq 48(%rsi),%r10,%rax
- adcxq %r11,%r9
- adoxq %r12,%r10
- mulxq 56(%rsi),%r11,%r12
- movq 32(%rsi),%rdx
- movq 40(%rsi),%r14
- adcxq %rbx,%r10
- adoxq %rax,%r11
- movq 48(%rsi),%r15
- adcxq %r13,%r11
- adoxq %rbp,%r12
- adcxq %rbp,%r12
-
- movq %r8,56(%rdi)
- movq %r9,64(%rdi)
-
- mulxq %r14,%r9,%rax
- movq 56(%rsi),%r8
- adcxq %r10,%r9
- mulxq %r15,%r10,%rbx
- adoxq %rax,%r10
- adcxq %r11,%r10
- mulxq %r8,%r11,%rax
- movq %r14,%rdx
- adoxq %rbx,%r11
- adcxq %r12,%r11
-
- adcxq %rbp,%rax
-
- mulxq %r15,%r14,%rbx
- mulxq %r8,%r12,%r13
- movq %r15,%rdx
- leaq 64(%rsi),%rsi
- adcxq %r14,%r11
- adoxq %rbx,%r12
- adcxq %rax,%r12
- adoxq %rbp,%r13
-
-.byte 0x67,0x67
- mulxq %r8,%r8,%r14
- adcxq %r8,%r13
- adcxq %rbp,%r14
-
- cmpq 8+8(%rsp),%rsi
- je .Lsqrx8x_outer_break
-
- negq %rcx
- movq $-8,%rcx
- movq %rbp,%r15
- movq 64(%rdi),%r8
- adcxq 72(%rdi),%r9
- adcxq 80(%rdi),%r10
- adcxq 88(%rdi),%r11
- adcq 96(%rdi),%r12
- adcq 104(%rdi),%r13
- adcq 112(%rdi),%r14
- adcq 120(%rdi),%r15
- leaq (%rsi),%rbp
- leaq 128(%rdi),%rdi
- sbbq %rax,%rax
-
- movq -64(%rsi),%rdx
- movq %rax,16+8(%rsp)
- movq %rdi,24+8(%rsp)
-
-
- xorl %eax,%eax
- jmp .Lsqrx8x_loop
-
-.align 32
-.Lsqrx8x_loop:
- movq %r8,%rbx
- mulxq 0(%rbp),%rax,%r8
- adcxq %rax,%rbx
- adoxq %r9,%r8
-
- mulxq 8(%rbp),%rax,%r9
- adcxq %rax,%r8
- adoxq %r10,%r9
-
- mulxq 16(%rbp),%rax,%r10
- adcxq %rax,%r9
- adoxq %r11,%r10
-
- mulxq 24(%rbp),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
-
-.byte 0xc4,0x62,0xfb,0xf6,0xa5,0x20,0x00,0x00,0x00
- adcxq %rax,%r11
- adoxq %r13,%r12
-
- mulxq 40(%rbp),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
-
- mulxq 48(%rbp),%rax,%r14
- movq %rbx,(%rdi,%rcx,8)
- movl $0,%ebx
- adcxq %rax,%r13
- adoxq %r15,%r14
-
-.byte 0xc4,0x62,0xfb,0xf6,0xbd,0x38,0x00,0x00,0x00
- movq 8(%rsi,%rcx,8),%rdx
- adcxq %rax,%r14
- adoxq %rbx,%r15
- adcxq %rbx,%r15
-
-.byte 0x67
- incq %rcx
- jnz .Lsqrx8x_loop
-
- leaq 64(%rbp),%rbp
- movq $-8,%rcx
- cmpq 8+8(%rsp),%rbp
- je .Lsqrx8x_break
-
- subq 16+8(%rsp),%rbx
-.byte 0x66
- movq -64(%rsi),%rdx
- adcxq 0(%rdi),%r8
- adcxq 8(%rdi),%r9
- adcq 16(%rdi),%r10
- adcq 24(%rdi),%r11
- adcq 32(%rdi),%r12
- adcq 40(%rdi),%r13
- adcq 48(%rdi),%r14
- adcq 56(%rdi),%r15
- leaq 64(%rdi),%rdi
-.byte 0x67
- sbbq %rax,%rax
- xorl %ebx,%ebx
- movq %rax,16+8(%rsp)
- jmp .Lsqrx8x_loop
-
-.align 32
-.Lsqrx8x_break:
- xorq %rbp,%rbp
- subq 16+8(%rsp),%rbx
- adcxq %rbp,%r8
- movq 24+8(%rsp),%rcx
- adcxq %rbp,%r9
- movq 0(%rsi),%rdx
- adcq $0,%r10
- movq %r8,0(%rdi)
- adcq $0,%r11
- adcq $0,%r12
- adcq $0,%r13
- adcq $0,%r14
- adcq $0,%r15
- cmpq %rcx,%rdi
- je .Lsqrx8x_outer_loop
-
- movq %r9,8(%rdi)
- movq 8(%rcx),%r9
- movq %r10,16(%rdi)
- movq 16(%rcx),%r10
- movq %r11,24(%rdi)
- movq 24(%rcx),%r11
- movq %r12,32(%rdi)
- movq 32(%rcx),%r12
- movq %r13,40(%rdi)
- movq 40(%rcx),%r13
- movq %r14,48(%rdi)
- movq 48(%rcx),%r14
- movq %r15,56(%rdi)
- movq 56(%rcx),%r15
- movq %rcx,%rdi
- jmp .Lsqrx8x_outer_loop
-
-.align 32
-.Lsqrx8x_outer_break:
- movq %r9,72(%rdi)
-.byte 102,72,15,126,217
- movq %r10,80(%rdi)
- movq %r11,88(%rdi)
- movq %r12,96(%rdi)
- movq %r13,104(%rdi)
- movq %r14,112(%rdi)
- leaq 48+8(%rsp),%rdi
- movq (%rsi,%rcx,1),%rdx
-
- movq 8(%rdi),%r11
- xorq %r10,%r10
- movq 0+8(%rsp),%r9
- adoxq %r11,%r11
- movq 16(%rdi),%r12
- movq 24(%rdi),%r13
-
-
-.align 32
-.Lsqrx4x_shift_n_add:
- mulxq %rdx,%rax,%rbx
- adoxq %r12,%r12
- adcxq %r10,%rax
-.byte 0x48,0x8b,0x94,0x0e,0x08,0x00,0x00,0x00
-.byte 0x4c,0x8b,0x97,0x20,0x00,0x00,0x00
- adoxq %r13,%r13
- adcxq %r11,%rbx
- movq 40(%rdi),%r11
- movq %rax,0(%rdi)
- movq %rbx,8(%rdi)
-
- mulxq %rdx,%rax,%rbx
- adoxq %r10,%r10
- adcxq %r12,%rax
- movq 16(%rsi,%rcx,1),%rdx
- movq 48(%rdi),%r12
- adoxq %r11,%r11
- adcxq %r13,%rbx
- movq 56(%rdi),%r13
- movq %rax,16(%rdi)
- movq %rbx,24(%rdi)
-
- mulxq %rdx,%rax,%rbx
- adoxq %r12,%r12
- adcxq %r10,%rax
- movq 24(%rsi,%rcx,1),%rdx
- leaq 32(%rcx),%rcx
- movq 64(%rdi),%r10
- adoxq %r13,%r13
- adcxq %r11,%rbx
- movq 72(%rdi),%r11
- movq %rax,32(%rdi)
- movq %rbx,40(%rdi)
-
- mulxq %rdx,%rax,%rbx
- adoxq %r10,%r10
- adcxq %r12,%rax
- jrcxz .Lsqrx4x_shift_n_add_break
-.byte 0x48,0x8b,0x94,0x0e,0x00,0x00,0x00,0x00
- adoxq %r11,%r11
- adcxq %r13,%rbx
- movq 80(%rdi),%r12
- movq 88(%rdi),%r13
- movq %rax,48(%rdi)
- movq %rbx,56(%rdi)
- leaq 64(%rdi),%rdi
- nop
- jmp .Lsqrx4x_shift_n_add
-
-.align 32
-.Lsqrx4x_shift_n_add_break:
- adcxq %r13,%rbx
- movq %rax,48(%rdi)
- movq %rbx,56(%rdi)
- leaq 64(%rdi),%rdi
-.byte 102,72,15,126,213
-__bn_sqrx8x_reduction:
- xorl %eax,%eax
- movq 32+8(%rsp),%rbx
- movq 48+8(%rsp),%rdx
- leaq -64(%rbp,%r9,1),%rcx
-
- movq %rcx,0+8(%rsp)
- movq %rdi,8+8(%rsp)
-
- leaq 48+8(%rsp),%rdi
- jmp .Lsqrx8x_reduction_loop
-
-.align 32
-.Lsqrx8x_reduction_loop:
- movq 8(%rdi),%r9
- movq 16(%rdi),%r10
- movq 24(%rdi),%r11
- movq 32(%rdi),%r12
- movq %rdx,%r8
- imulq %rbx,%rdx
- movq 40(%rdi),%r13
- movq 48(%rdi),%r14
- movq 56(%rdi),%r15
- movq %rax,24+8(%rsp)
-
- leaq 64(%rdi),%rdi
- xorq %rsi,%rsi
- movq $-8,%rcx
- jmp .Lsqrx8x_reduce
-
-.align 32
-.Lsqrx8x_reduce:
- movq %r8,%rbx
- mulxq 0(%rbp),%rax,%r8
- adcxq %rbx,%rax
- adoxq %r9,%r8
-
- mulxq 8(%rbp),%rbx,%r9
- adcxq %rbx,%r8
- adoxq %r10,%r9
-
- mulxq 16(%rbp),%rbx,%r10
- adcxq %rbx,%r9
- adoxq %r11,%r10
-
- mulxq 24(%rbp),%rbx,%r11
- adcxq %rbx,%r10
- adoxq %r12,%r11
-
-.byte 0xc4,0x62,0xe3,0xf6,0xa5,0x20,0x00,0x00,0x00
- movq %rdx,%rax
- movq %r8,%rdx
- adcxq %rbx,%r11
- adoxq %r13,%r12
-
- mulxq 32+8(%rsp),%rbx,%rdx
- movq %rax,%rdx
- movq %rax,64+48+8(%rsp,%rcx,8)
-
- mulxq 40(%rbp),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
-
- mulxq 48(%rbp),%rax,%r14
- adcxq %rax,%r13
- adoxq %r15,%r14
-
- mulxq 56(%rbp),%rax,%r15
- movq %rbx,%rdx
- adcxq %rax,%r14
- adoxq %rsi,%r15
- adcxq %rsi,%r15
-
-.byte 0x67,0x67,0x67
- incq %rcx
- jnz .Lsqrx8x_reduce
-
- movq %rsi,%rax
- cmpq 0+8(%rsp),%rbp
- jae .Lsqrx8x_no_tail
-
- movq 48+8(%rsp),%rdx
- addq 0(%rdi),%r8
- leaq 64(%rbp),%rbp
- movq $-8,%rcx
- adcxq 8(%rdi),%r9
- adcxq 16(%rdi),%r10
- adcq 24(%rdi),%r11
- adcq 32(%rdi),%r12
- adcq 40(%rdi),%r13
- adcq 48(%rdi),%r14
- adcq 56(%rdi),%r15
- leaq 64(%rdi),%rdi
- sbbq %rax,%rax
-
- xorq %rsi,%rsi
- movq %rax,16+8(%rsp)
- jmp .Lsqrx8x_tail
-
-.align 32
-.Lsqrx8x_tail:
- movq %r8,%rbx
- mulxq 0(%rbp),%rax,%r8
- adcxq %rax,%rbx
- adoxq %r9,%r8
-
- mulxq 8(%rbp),%rax,%r9
- adcxq %rax,%r8
- adoxq %r10,%r9
-
- mulxq 16(%rbp),%rax,%r10
- adcxq %rax,%r9
- adoxq %r11,%r10
-
- mulxq 24(%rbp),%rax,%r11
- adcxq %rax,%r10
- adoxq %r12,%r11
-
-.byte 0xc4,0x62,0xfb,0xf6,0xa5,0x20,0x00,0x00,0x00
- adcxq %rax,%r11
- adoxq %r13,%r12
-
- mulxq 40(%rbp),%rax,%r13
- adcxq %rax,%r12
- adoxq %r14,%r13
-
- mulxq 48(%rbp),%rax,%r14
- adcxq %rax,%r13
- adoxq %r15,%r14
-
- mulxq 56(%rbp),%rax,%r15
- movq 72+48+8(%rsp,%rcx,8),%rdx
- adcxq %rax,%r14
- adoxq %rsi,%r15
- movq %rbx,(%rdi,%rcx,8)
- movq %r8,%rbx
- adcxq %rsi,%r15
-
- incq %rcx
- jnz .Lsqrx8x_tail
-
- cmpq 0+8(%rsp),%rbp
- jae .Lsqrx8x_tail_done
-
- subq 16+8(%rsp),%rsi
- movq 48+8(%rsp),%rdx
- leaq 64(%rbp),%rbp
- adcq 0(%rdi),%r8
- adcq 8(%rdi),%r9
- adcq 16(%rdi),%r10
- adcq 24(%rdi),%r11
- adcq 32(%rdi),%r12
- adcq 40(%rdi),%r13
- adcq 48(%rdi),%r14
- adcq 56(%rdi),%r15
- leaq 64(%rdi),%rdi
- sbbq %rax,%rax
- subq $8,%rcx
-
- xorq %rsi,%rsi
- movq %rax,16+8(%rsp)
- jmp .Lsqrx8x_tail
-
-.align 32
-.Lsqrx8x_tail_done:
- xorq %rax,%rax
- addq 24+8(%rsp),%r8
- adcq $0,%r9
- adcq $0,%r10
- adcq $0,%r11
- adcq $0,%r12
- adcq $0,%r13
- adcq $0,%r14
- adcq $0,%r15
- adcq $0,%rax
-
- subq 16+8(%rsp),%rsi
-.Lsqrx8x_no_tail:
- adcq 0(%rdi),%r8
-.byte 102,72,15,126,217
- adcq 8(%rdi),%r9
- movq 56(%rbp),%rsi
-.byte 102,72,15,126,213
- adcq 16(%rdi),%r10
- adcq 24(%rdi),%r11
- adcq 32(%rdi),%r12
- adcq 40(%rdi),%r13
- adcq 48(%rdi),%r14
- adcq 56(%rdi),%r15
- adcq $0,%rax
-
- movq 32+8(%rsp),%rbx
- movq 64(%rdi,%rcx,1),%rdx
-
- movq %r8,0(%rdi)
- leaq 64(%rdi),%r8
- movq %r9,8(%rdi)
- movq %r10,16(%rdi)
- movq %r11,24(%rdi)
- movq %r12,32(%rdi)
- movq %r13,40(%rdi)
- movq %r14,48(%rdi)
- movq %r15,56(%rdi)
-
- leaq 64(%rdi,%rcx,1),%rdi
- cmpq 8+8(%rsp),%r8
- jb .Lsqrx8x_reduction_loop
- .byte 0xf3,0xc3
-.cfi_endproc
-.size bn_sqrx8x_internal,.-bn_sqrx8x_internal
-.align 32
-__bn_postx4x_internal:
- movq 0(%rbp),%r12
- movq %rcx,%r10
- movq %rcx,%r9
- negq %rax
- sarq $3+2,%rcx
-
-.byte 102,72,15,126,202
-.byte 102,72,15,126,206
- decq %r12
- movq 8(%rbp),%r13
- xorq %r8,%r8
- movq 16(%rbp),%r14
- movq 24(%rbp),%r15
- jmp .Lsqrx4x_sub_entry
-
-.align 16
-.Lsqrx4x_sub:
- movq 0(%rbp),%r12
- movq 8(%rbp),%r13
- movq 16(%rbp),%r14
- movq 24(%rbp),%r15
-.Lsqrx4x_sub_entry:
- andnq %rax,%r12,%r12
- leaq 32(%rbp),%rbp
- andnq %rax,%r13,%r13
- andnq %rax,%r14,%r14
- andnq %rax,%r15,%r15
-
- negq %r8
- adcq 0(%rdi),%r12
- adcq 8(%rdi),%r13
- adcq 16(%rdi),%r14
- adcq 24(%rdi),%r15
- movq %r12,0(%rdx)
- leaq 32(%rdi),%rdi
- movq %r13,8(%rdx)
- sbbq %r8,%r8
- movq %r14,16(%rdx)
- movq %r15,24(%rdx)
- leaq 32(%rdx),%rdx
-
- incq %rcx
- jnz .Lsqrx4x_sub
-
- negq %r9
-
- .byte 0xf3,0xc3
-.size __bn_postx4x_internal,.-__bn_postx4x_internal
.globl bn_get_bits5
.type bn_get_bits5,@function
.align 16
bn_get_bits5:
+.cfi_startproc
leaq 0(%rdi),%r10
leaq 1(%rdi),%r11
movl %esi,%ecx
@@ -3576,12 +2224,14 @@ bn_get_bits5:
shrl %cl,%eax
andl $31,%eax
.byte 0xf3,0xc3
+.cfi_endproc
.size bn_get_bits5,.-bn_get_bits5
.globl bn_scatter5
.type bn_scatter5,@function
.align 16
bn_scatter5:
+.cfi_startproc
cmpl $0,%esi
jz .Lscatter_epilogue
leaq (%rdx,%rcx,8),%rdx
@@ -3594,6 +2244,7 @@ bn_scatter5:
jnz .Lscatter
.Lscatter_epilogue:
.byte 0xf3,0xc3
+.cfi_endproc
.size bn_scatter5,.-bn_scatter5
.globl bn_gather5
@@ -3601,6 +2252,7 @@ bn_scatter5:
.align 32
bn_gather5:
.LSEH_begin_bn_gather5:
+.cfi_startproc
.byte 0x4c,0x8d,0x14,0x24
.byte 0x48,0x81,0xec,0x08,0x01,0x00,0x00
@@ -3758,6 +2410,7 @@ bn_gather5:
leaq (%r10),%rsp
.byte 0xf3,0xc3
.LSEH_end_bn_gather5:
+.cfi_endproc
.size bn_gather5,.-bn_gather5
.align 64
.Linc:
diff --git a/secure/lib/libcrypto/arm/aes-armv4.S b/secure/lib/libcrypto/arm/aes-armv4.S
index 2c169908db682..3da60f935ad06 100644
--- a/secure/lib/libcrypto/arm/aes-armv4.S
+++ b/secure/lib/libcrypto/arm/aes-armv4.S
@@ -1,6 +1,6 @@
/* $FreeBSD$ */
/* Do not modify. This file is auto-generated from aes-armv4.pl. */
-@ Copyright 2007-2018 The OpenSSL Project Authors. All Rights Reserved.
+@ Copyright 2007-2020 The OpenSSL Project Authors. All Rights Reserved.
@
@ Licensed under the OpenSSL license (the "License"). You may not use
@ this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/arm/bsaes-armv7.S b/secure/lib/libcrypto/arm/bsaes-armv7.S
index 26a126408ba02..8fe75aaf81ca2 100644
--- a/secure/lib/libcrypto/arm/bsaes-armv7.S
+++ b/secure/lib/libcrypto/arm/bsaes-armv7.S
@@ -1,6 +1,6 @@
/* $FreeBSD$ */
/* Do not modify. This file is auto-generated from bsaes-armv7.pl. */
-@ Copyright 2012-2018 The OpenSSL Project Authors. All Rights Reserved.
+@ Copyright 2012-2020 The OpenSSL Project Authors. All Rights Reserved.
@
@ Licensed under the OpenSSL license (the "License"). You may not use
@ this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/arm/ecp_nistz256-armv4.S b/secure/lib/libcrypto/arm/ecp_nistz256-armv4.S
index 636a5cb7bc651..5a049d99e666f 100644
--- a/secure/lib/libcrypto/arm/ecp_nistz256-armv4.S
+++ b/secure/lib/libcrypto/arm/ecp_nistz256-armv4.S
@@ -3759,7 +3759,7 @@ ecp_nistz256_point_add:
#endif
movne r12,#-1
stmia r3,{r4,r5,r6,r7,r8,r9,r10,r11}
- str r12,[sp,#32*18+8] @ !in2infty
+ str r12,[sp,#32*18+8] @ ~in2infty
ldmia r1!,{r4,r5,r6,r7,r8,r9,r10,r11} @ copy in1_x
add r3,sp,#96
@@ -3780,7 +3780,7 @@ ecp_nistz256_point_add:
#endif
movne r12,#-1
stmia r3,{r4,r5,r6,r7,r8,r9,r10,r11}
- str r12,[sp,#32*18+4] @ !in1infty
+ str r12,[sp,#32*18+4] @ ~in1infty
add r1,sp,#256
add r2,sp,#256
@@ -3845,33 +3845,20 @@ ecp_nistz256_point_add:
orr r11,r11,r4
orr r6,r6,r8
orr r11,r11,r9
- orrs r11,r11,r6
+ orr r11,r11,r6 @ ~is_equal(U1,U2)
- bne .Ladd_proceed @ is_equal(U1,U2)?
+ ldr r10,[sp,#32*18+4] @ ~in1infty
+ ldr r12,[sp,#32*18+8] @ ~in2infty
+ ldr r14,[sp,#32*18+12] @ ~is_equal(S1,S2)
+ mvn r10,r10 @ -1/0 -> 0/-1
+ mvn r12,r12 @ -1/0 -> 0/-1
+ orr r11,r10
+ orr r11,r12
+ orrs r11,r14 @ set flags
- ldr r10,[sp,#32*18+4]
- ldr r12,[sp,#32*18+8]
- ldr r14,[sp,#32*18+12]
- tst r10,r12
- beq .Ladd_proceed @ (in1infty || in2infty)?
- tst r14,r14
- beq .Ladd_double @ is_equal(S1,S2)?
+ @ if(~is_equal(U1,U2) | in1infty | in2infty | ~is_equal(S1,S2))
+ bne .Ladd_proceed
- ldr r0,[sp,#32*18+16]
- eor r4,r4,r4
- eor r5,r5,r5
- eor r6,r6,r6
- eor r7,r7,r7
- eor r8,r8,r8
- eor r9,r9,r9
- eor r10,r10,r10
- eor r11,r11,r11
- stmia r0!,{r4,r5,r6,r7,r8,r9,r10,r11}
- stmia r0!,{r4,r5,r6,r7,r8,r9,r10,r11}
- stmia r0!,{r4,r5,r6,r7,r8,r9,r10,r11}
- b .Ladd_done
-
-.align 4
.Ladd_double:
ldr r1,[sp,#32*18+20]
add sp,sp,#32*(18-5)+16 @ difference in frame sizes
@@ -3936,24 +3923,24 @@ ecp_nistz256_point_add:
add r2,sp,#544
bl __ecp_nistz256_sub_from @ p256_sub(res_y, res_y, S2);
- ldr r11,[sp,#32*18+4] @ !in1intfy
- ldr r12,[sp,#32*18+8] @ !in2intfy
+ ldr r11,[sp,#32*18+4] @ ~in1infty
+ ldr r12,[sp,#32*18+8] @ ~in2infty
add r1,sp,#0
add r2,sp,#192
- and r10,r11,r12
+ and r10,r11,r12 @ ~in1infty & ~in2infty
mvn r11,r11
add r3,sp,#96
- and r11,r11,r12
- mvn r12,r12
+ and r11,r11,r12 @ in1infty & ~in2infty
+ mvn r12,r12 @ in2infty
ldr r0,[sp,#32*18+16]
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -3963,11 +3950,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -3977,11 +3964,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -3991,11 +3978,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4005,11 +3992,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4019,11 +4006,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4033,11 +4020,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4047,11 +4034,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4061,11 +4048,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4075,11 +4062,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4089,11 +4076,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4103,11 +4090,11 @@ ecp_nistz256_point_add:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4149,7 +4136,7 @@ ecp_nistz256_point_add_affine:
#endif
movne r12,#-1
stmia r3,{r4,r5,r6,r7,r8,r9,r10,r11}
- str r12,[sp,#32*15+4] @ !in1infty
+ str r12,[sp,#32*15+4] @ ~in1infty
ldmia r2!,{r4,r5,r6,r7,r8,r9,r10,r11} @ copy in2_x
add r3,sp,#192
@@ -4176,7 +4163,7 @@ ecp_nistz256_point_add_affine:
it ne
#endif
movne r12,#-1
- str r12,[sp,#32*15+8] @ !in2infty
+ str r12,[sp,#32*15+8] @ ~in2infty
add r1,sp,#160
add r2,sp,#160
@@ -4258,24 +4245,24 @@ ecp_nistz256_point_add_affine:
add r2,sp,#288
bl __ecp_nistz256_sub_from @ p256_sub(res_y, res_y, S2);
- ldr r11,[sp,#32*15+4] @ !in1intfy
- ldr r12,[sp,#32*15+8] @ !in2intfy
+ ldr r11,[sp,#32*15+4] @ ~in1infty
+ ldr r12,[sp,#32*15+8] @ ~in2infty
add r1,sp,#0
add r2,sp,#192
- and r10,r11,r12
+ and r10,r11,r12 @ ~in1infty & ~in2infty
mvn r11,r11
add r3,sp,#96
- and r11,r11,r12
- mvn r12,r12
+ and r11,r11,r12 @ in1infty & ~in2infty
+ mvn r12,r12 @ in2infty
ldr r0,[sp,#32*15]
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4285,11 +4272,11 @@ ecp_nistz256_point_add_affine:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4299,11 +4286,11 @@ ecp_nistz256_point_add_affine:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4313,11 +4300,11 @@ ecp_nistz256_point_add_affine:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4327,11 +4314,11 @@ ecp_nistz256_point_add_affine:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4341,11 +4328,11 @@ ecp_nistz256_point_add_affine:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4355,11 +4342,11 @@ ecp_nistz256_point_add_affine:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
@@ -4369,11 +4356,11 @@ ecp_nistz256_point_add_affine:
ldmia r1!,{r4,r5} @ res_x
ldmia r2!,{r6,r7} @ in2_x
ldmia r3!,{r8,r9} @ in1_x
- and r4,r4,r10
+ and r4,r4,r10 @ ~in1infty & ~in2infty
and r5,r5,r10
- and r6,r6,r11
+ and r6,r6,r11 @ in1infty & ~in2infty
and r7,r7,r11
- and r8,r8,r12
+ and r8,r8,r12 @ in2infty
and r9,r9,r12
orr r4,r4,r6
orr r5,r5,r7
diff --git a/secure/lib/libcrypto/arm/sha256-armv4.S b/secure/lib/libcrypto/arm/sha256-armv4.S
index 5f5562dcc1176..fc7d2c499da58 100644
--- a/secure/lib/libcrypto/arm/sha256-armv4.S
+++ b/secure/lib/libcrypto/arm/sha256-armv4.S
@@ -1,6 +1,6 @@
/* $FreeBSD$ */
/* Do not modify. This file is auto-generated from sha256-armv4.pl. */
-@ Copyright 2007-2018 The OpenSSL Project Authors. All Rights Reserved.
+@ Copyright 2007-2020 The OpenSSL Project Authors. All Rights Reserved.
@
@ Licensed under the OpenSSL license (the "License"). You may not use
@ this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/arm/sha512-armv4.S b/secure/lib/libcrypto/arm/sha512-armv4.S
index 7b26d80ea752f..2e230fc407483 100644
--- a/secure/lib/libcrypto/arm/sha512-armv4.S
+++ b/secure/lib/libcrypto/arm/sha512-armv4.S
@@ -1,6 +1,6 @@
/* $FreeBSD$ */
/* Do not modify. This file is auto-generated from sha512-armv4.pl. */
-@ Copyright 2007-2018 The OpenSSL Project Authors. All Rights Reserved.
+@ Copyright 2007-2020 The OpenSSL Project Authors. All Rights Reserved.
@
@ Licensed under the OpenSSL license (the "License"). You may not use
@ this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/i386/chacha-x86.S b/secure/lib/libcrypto/i386/chacha-x86.S
index d6b2936a53818..566285310e068 100644
--- a/secure/lib/libcrypto/i386/chacha-x86.S
+++ b/secure/lib/libcrypto/i386/chacha-x86.S
@@ -385,8 +385,6 @@ ChaCha20_ssse3:
pushl %esi
pushl %edi
.Lssse3_shortcut:
- testl $2048,4(%ebp)
- jnz .Lxop_shortcut
movl 20(%esp),%edi
movl 24(%esp),%esi
movl 28(%esp),%ecx
@@ -530,484 +528,6 @@ ChaCha20_ssse3:
.byte 44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32
.byte 60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111
.byte 114,103,62,0
-.globl ChaCha20_xop
-.type ChaCha20_xop,@function
-.align 16
-ChaCha20_xop:
-.L_ChaCha20_xop_begin:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-.Lxop_shortcut:
- movl 20(%esp),%edi
- movl 24(%esp),%esi
- movl 28(%esp),%ecx
- movl 32(%esp),%edx
- movl 36(%esp),%ebx
- vzeroupper
- movl %esp,%ebp
- subl $524,%esp
- andl $-64,%esp
- movl %ebp,512(%esp)
- leal .Lssse3_data-.Lpic_point(%eax),%eax
- vmovdqu (%ebx),%xmm3
- cmpl $256,%ecx
- jb .L0141x
- movl %edx,516(%esp)
- movl %ebx,520(%esp)
- subl $256,%ecx
- leal 384(%esp),%ebp
- vmovdqu (%edx),%xmm7
- vpshufd $0,%xmm3,%xmm0
- vpshufd $85,%xmm3,%xmm1
- vpshufd $170,%xmm3,%xmm2
- vpshufd $255,%xmm3,%xmm3
- vpaddd 48(%eax),%xmm0,%xmm0
- vpshufd $0,%xmm7,%xmm4
- vpshufd $85,%xmm7,%xmm5
- vpsubd 64(%eax),%xmm0,%xmm0
- vpshufd $170,%xmm7,%xmm6
- vpshufd $255,%xmm7,%xmm7
- vmovdqa %xmm0,64(%ebp)
- vmovdqa %xmm1,80(%ebp)
- vmovdqa %xmm2,96(%ebp)
- vmovdqa %xmm3,112(%ebp)
- vmovdqu 16(%edx),%xmm3
- vmovdqa %xmm4,-64(%ebp)
- vmovdqa %xmm5,-48(%ebp)
- vmovdqa %xmm6,-32(%ebp)
- vmovdqa %xmm7,-16(%ebp)
- vmovdqa 32(%eax),%xmm7
- leal 128(%esp),%ebx
- vpshufd $0,%xmm3,%xmm0
- vpshufd $85,%xmm3,%xmm1
- vpshufd $170,%xmm3,%xmm2
- vpshufd $255,%xmm3,%xmm3
- vpshufd $0,%xmm7,%xmm4
- vpshufd $85,%xmm7,%xmm5
- vpshufd $170,%xmm7,%xmm6
- vpshufd $255,%xmm7,%xmm7
- vmovdqa %xmm0,(%ebp)
- vmovdqa %xmm1,16(%ebp)
- vmovdqa %xmm2,32(%ebp)
- vmovdqa %xmm3,48(%ebp)
- vmovdqa %xmm4,-128(%ebp)
- vmovdqa %xmm5,-112(%ebp)
- vmovdqa %xmm6,-96(%ebp)
- vmovdqa %xmm7,-80(%ebp)
- leal 128(%esi),%esi
- leal 128(%edi),%edi
- jmp .L015outer_loop
-.align 32
-.L015outer_loop:
- vmovdqa -112(%ebp),%xmm1
- vmovdqa -96(%ebp),%xmm2
- vmovdqa -80(%ebp),%xmm3
- vmovdqa -48(%ebp),%xmm5
- vmovdqa -32(%ebp),%xmm6
- vmovdqa -16(%ebp),%xmm7
- vmovdqa %xmm1,-112(%ebx)
- vmovdqa %xmm2,-96(%ebx)
- vmovdqa %xmm3,-80(%ebx)
- vmovdqa %xmm5,-48(%ebx)
- vmovdqa %xmm6,-32(%ebx)
- vmovdqa %xmm7,-16(%ebx)
- vmovdqa 32(%ebp),%xmm2
- vmovdqa 48(%ebp),%xmm3
- vmovdqa 64(%ebp),%xmm4
- vmovdqa 80(%ebp),%xmm5
- vmovdqa 96(%ebp),%xmm6
- vmovdqa 112(%ebp),%xmm7
- vpaddd 64(%eax),%xmm4,%xmm4
- vmovdqa %xmm2,32(%ebx)
- vmovdqa %xmm3,48(%ebx)
- vmovdqa %xmm4,64(%ebx)
- vmovdqa %xmm5,80(%ebx)
- vmovdqa %xmm6,96(%ebx)
- vmovdqa %xmm7,112(%ebx)
- vmovdqa %xmm4,64(%ebp)
- vmovdqa -128(%ebp),%xmm0
- vmovdqa %xmm4,%xmm6
- vmovdqa -64(%ebp),%xmm3
- vmovdqa (%ebp),%xmm4
- vmovdqa 16(%ebp),%xmm5
- movl $10,%edx
- nop
-.align 32
-.L016loop:
- vpaddd %xmm3,%xmm0,%xmm0
- vpxor %xmm0,%xmm6,%xmm6
-.byte 143,232,120,194,246,16
- vpaddd %xmm6,%xmm4,%xmm4
- vpxor %xmm4,%xmm3,%xmm2
- vmovdqa -112(%ebx),%xmm1
-.byte 143,232,120,194,210,12
- vmovdqa -48(%ebx),%xmm3
- vpaddd %xmm2,%xmm0,%xmm0
- vmovdqa 80(%ebx),%xmm7
- vpxor %xmm0,%xmm6,%xmm6
- vpaddd %xmm3,%xmm1,%xmm1
-.byte 143,232,120,194,246,8
- vmovdqa %xmm0,-128(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa %xmm6,64(%ebx)
- vpxor %xmm4,%xmm2,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
-.byte 143,232,120,194,210,7
- vmovdqa %xmm4,(%ebx)
-.byte 143,232,120,194,255,16
- vmovdqa %xmm2,-64(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vmovdqa 32(%ebx),%xmm4
- vpxor %xmm5,%xmm3,%xmm3
- vmovdqa -96(%ebx),%xmm0
-.byte 143,232,120,194,219,12
- vmovdqa -32(%ebx),%xmm2
- vpaddd %xmm3,%xmm1,%xmm1
- vmovdqa 96(%ebx),%xmm6
- vpxor %xmm1,%xmm7,%xmm7
- vpaddd %xmm2,%xmm0,%xmm0
-.byte 143,232,120,194,255,8
- vmovdqa %xmm1,-112(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vmovdqa %xmm7,80(%ebx)
- vpxor %xmm5,%xmm3,%xmm3
- vpxor %xmm0,%xmm6,%xmm6
-.byte 143,232,120,194,219,7
- vmovdqa %xmm5,16(%ebx)
-.byte 143,232,120,194,246,16
- vmovdqa %xmm3,-48(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa 48(%ebx),%xmm5
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa -80(%ebx),%xmm1
-.byte 143,232,120,194,210,12
- vmovdqa -16(%ebx),%xmm3
- vpaddd %xmm2,%xmm0,%xmm0
- vmovdqa 112(%ebx),%xmm7
- vpxor %xmm0,%xmm6,%xmm6
- vpaddd %xmm3,%xmm1,%xmm1
-.byte 143,232,120,194,246,8
- vmovdqa %xmm0,-96(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa %xmm6,96(%ebx)
- vpxor %xmm4,%xmm2,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
-.byte 143,232,120,194,210,7
-.byte 143,232,120,194,255,16
- vmovdqa %xmm2,-32(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm5,%xmm3,%xmm3
- vmovdqa -128(%ebx),%xmm0
-.byte 143,232,120,194,219,12
- vmovdqa -48(%ebx),%xmm2
- vpaddd %xmm3,%xmm1,%xmm1
- vpxor %xmm1,%xmm7,%xmm7
- vpaddd %xmm2,%xmm0,%xmm0
-.byte 143,232,120,194,255,8
- vmovdqa %xmm1,-80(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm5,%xmm3,%xmm3
- vpxor %xmm0,%xmm7,%xmm6
-.byte 143,232,120,194,219,7
-.byte 143,232,120,194,246,16
- vmovdqa %xmm3,-16(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa -112(%ebx),%xmm1
-.byte 143,232,120,194,210,12
- vmovdqa -32(%ebx),%xmm3
- vpaddd %xmm2,%xmm0,%xmm0
- vmovdqa 64(%ebx),%xmm7
- vpxor %xmm0,%xmm6,%xmm6
- vpaddd %xmm3,%xmm1,%xmm1
-.byte 143,232,120,194,246,8
- vmovdqa %xmm0,-128(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa %xmm6,112(%ebx)
- vpxor %xmm4,%xmm2,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
-.byte 143,232,120,194,210,7
- vmovdqa %xmm4,32(%ebx)
-.byte 143,232,120,194,255,16
- vmovdqa %xmm2,-48(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vmovdqa (%ebx),%xmm4
- vpxor %xmm5,%xmm3,%xmm3
- vmovdqa -96(%ebx),%xmm0
-.byte 143,232,120,194,219,12
- vmovdqa -16(%ebx),%xmm2
- vpaddd %xmm3,%xmm1,%xmm1
- vmovdqa 80(%ebx),%xmm6
- vpxor %xmm1,%xmm7,%xmm7
- vpaddd %xmm2,%xmm0,%xmm0
-.byte 143,232,120,194,255,8
- vmovdqa %xmm1,-112(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vmovdqa %xmm7,64(%ebx)
- vpxor %xmm5,%xmm3,%xmm3
- vpxor %xmm0,%xmm6,%xmm6
-.byte 143,232,120,194,219,7
- vmovdqa %xmm5,48(%ebx)
-.byte 143,232,120,194,246,16
- vmovdqa %xmm3,-32(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa 16(%ebx),%xmm5
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa -80(%ebx),%xmm1
-.byte 143,232,120,194,210,12
- vmovdqa -64(%ebx),%xmm3
- vpaddd %xmm2,%xmm0,%xmm0
- vmovdqa 96(%ebx),%xmm7
- vpxor %xmm0,%xmm6,%xmm6
- vpaddd %xmm3,%xmm1,%xmm1
-.byte 143,232,120,194,246,8
- vmovdqa %xmm0,-96(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa %xmm6,80(%ebx)
- vpxor %xmm4,%xmm2,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
-.byte 143,232,120,194,210,7
-.byte 143,232,120,194,255,16
- vmovdqa %xmm2,-16(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm5,%xmm3,%xmm3
- vmovdqa -128(%ebx),%xmm0
-.byte 143,232,120,194,219,12
- vpaddd %xmm3,%xmm1,%xmm1
- vmovdqa 64(%ebx),%xmm6
- vpxor %xmm1,%xmm7,%xmm7
-.byte 143,232,120,194,255,8
- vmovdqa %xmm1,-80(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vmovdqa %xmm7,96(%ebx)
- vpxor %xmm5,%xmm3,%xmm3
-.byte 143,232,120,194,219,7
- decl %edx
- jnz .L016loop
- vmovdqa %xmm3,-64(%ebx)
- vmovdqa %xmm4,(%ebx)
- vmovdqa %xmm5,16(%ebx)
- vmovdqa %xmm6,64(%ebx)
- vmovdqa %xmm7,96(%ebx)
- vmovdqa -112(%ebx),%xmm1
- vmovdqa -96(%ebx),%xmm2
- vmovdqa -80(%ebx),%xmm3
- vpaddd -128(%ebp),%xmm0,%xmm0
- vpaddd -112(%ebp),%xmm1,%xmm1
- vpaddd -96(%ebp),%xmm2,%xmm2
- vpaddd -80(%ebp),%xmm3,%xmm3
- vpunpckldq %xmm1,%xmm0,%xmm6
- vpunpckldq %xmm3,%xmm2,%xmm7
- vpunpckhdq %xmm1,%xmm0,%xmm0
- vpunpckhdq %xmm3,%xmm2,%xmm2
- vpunpcklqdq %xmm7,%xmm6,%xmm1
- vpunpckhqdq %xmm7,%xmm6,%xmm6
- vpunpcklqdq %xmm2,%xmm0,%xmm7
- vpunpckhqdq %xmm2,%xmm0,%xmm3
- vpxor -128(%esi),%xmm1,%xmm4
- vpxor -64(%esi),%xmm6,%xmm5
- vpxor (%esi),%xmm7,%xmm6
- vpxor 64(%esi),%xmm3,%xmm7
- leal 16(%esi),%esi
- vmovdqa -64(%ebx),%xmm0
- vmovdqa -48(%ebx),%xmm1
- vmovdqa -32(%ebx),%xmm2
- vmovdqa -16(%ebx),%xmm3
- vmovdqu %xmm4,-128(%edi)
- vmovdqu %xmm5,-64(%edi)
- vmovdqu %xmm6,(%edi)
- vmovdqu %xmm7,64(%edi)
- leal 16(%edi),%edi
- vpaddd -64(%ebp),%xmm0,%xmm0
- vpaddd -48(%ebp),%xmm1,%xmm1
- vpaddd -32(%ebp),%xmm2,%xmm2
- vpaddd -16(%ebp),%xmm3,%xmm3
- vpunpckldq %xmm1,%xmm0,%xmm6
- vpunpckldq %xmm3,%xmm2,%xmm7
- vpunpckhdq %xmm1,%xmm0,%xmm0
- vpunpckhdq %xmm3,%xmm2,%xmm2
- vpunpcklqdq %xmm7,%xmm6,%xmm1
- vpunpckhqdq %xmm7,%xmm6,%xmm6
- vpunpcklqdq %xmm2,%xmm0,%xmm7
- vpunpckhqdq %xmm2,%xmm0,%xmm3
- vpxor -128(%esi),%xmm1,%xmm4
- vpxor -64(%esi),%xmm6,%xmm5
- vpxor (%esi),%xmm7,%xmm6
- vpxor 64(%esi),%xmm3,%xmm7
- leal 16(%esi),%esi
- vmovdqa (%ebx),%xmm0
- vmovdqa 16(%ebx),%xmm1
- vmovdqa 32(%ebx),%xmm2
- vmovdqa 48(%ebx),%xmm3
- vmovdqu %xmm4,-128(%edi)
- vmovdqu %xmm5,-64(%edi)
- vmovdqu %xmm6,(%edi)
- vmovdqu %xmm7,64(%edi)
- leal 16(%edi),%edi
- vpaddd (%ebp),%xmm0,%xmm0
- vpaddd 16(%ebp),%xmm1,%xmm1
- vpaddd 32(%ebp),%xmm2,%xmm2
- vpaddd 48(%ebp),%xmm3,%xmm3
- vpunpckldq %xmm1,%xmm0,%xmm6
- vpunpckldq %xmm3,%xmm2,%xmm7
- vpunpckhdq %xmm1,%xmm0,%xmm0
- vpunpckhdq %xmm3,%xmm2,%xmm2
- vpunpcklqdq %xmm7,%xmm6,%xmm1
- vpunpckhqdq %xmm7,%xmm6,%xmm6
- vpunpcklqdq %xmm2,%xmm0,%xmm7
- vpunpckhqdq %xmm2,%xmm0,%xmm3
- vpxor -128(%esi),%xmm1,%xmm4
- vpxor -64(%esi),%xmm6,%xmm5
- vpxor (%esi),%xmm7,%xmm6
- vpxor 64(%esi),%xmm3,%xmm7
- leal 16(%esi),%esi
- vmovdqa 64(%ebx),%xmm0
- vmovdqa 80(%ebx),%xmm1
- vmovdqa 96(%ebx),%xmm2
- vmovdqa 112(%ebx),%xmm3
- vmovdqu %xmm4,-128(%edi)
- vmovdqu %xmm5,-64(%edi)
- vmovdqu %xmm6,(%edi)
- vmovdqu %xmm7,64(%edi)
- leal 16(%edi),%edi
- vpaddd 64(%ebp),%xmm0,%xmm0
- vpaddd 80(%ebp),%xmm1,%xmm1
- vpaddd 96(%ebp),%xmm2,%xmm2
- vpaddd 112(%ebp),%xmm3,%xmm3
- vpunpckldq %xmm1,%xmm0,%xmm6
- vpunpckldq %xmm3,%xmm2,%xmm7
- vpunpckhdq %xmm1,%xmm0,%xmm0
- vpunpckhdq %xmm3,%xmm2,%xmm2
- vpunpcklqdq %xmm7,%xmm6,%xmm1
- vpunpckhqdq %xmm7,%xmm6,%xmm6
- vpunpcklqdq %xmm2,%xmm0,%xmm7
- vpunpckhqdq %xmm2,%xmm0,%xmm3
- vpxor -128(%esi),%xmm1,%xmm4
- vpxor -64(%esi),%xmm6,%xmm5
- vpxor (%esi),%xmm7,%xmm6
- vpxor 64(%esi),%xmm3,%xmm7
- leal 208(%esi),%esi
- vmovdqu %xmm4,-128(%edi)
- vmovdqu %xmm5,-64(%edi)
- vmovdqu %xmm6,(%edi)
- vmovdqu %xmm7,64(%edi)
- leal 208(%edi),%edi
- subl $256,%ecx
- jnc .L015outer_loop
- addl $256,%ecx
- jz .L017done
- movl 520(%esp),%ebx
- leal -128(%esi),%esi
- movl 516(%esp),%edx
- leal -128(%edi),%edi
- vmovd 64(%ebp),%xmm2
- vmovdqu (%ebx),%xmm3
- vpaddd 96(%eax),%xmm2,%xmm2
- vpand 112(%eax),%xmm3,%xmm3
- vpor %xmm2,%xmm3,%xmm3
-.L0141x:
- vmovdqa 32(%eax),%xmm0
- vmovdqu (%edx),%xmm1
- vmovdqu 16(%edx),%xmm2
- vmovdqa (%eax),%xmm6
- vmovdqa 16(%eax),%xmm7
- movl %ebp,48(%esp)
- vmovdqa %xmm0,(%esp)
- vmovdqa %xmm1,16(%esp)
- vmovdqa %xmm2,32(%esp)
- vmovdqa %xmm3,48(%esp)
- movl $10,%edx
- jmp .L018loop1x
-.align 16
-.L019outer1x:
- vmovdqa 80(%eax),%xmm3
- vmovdqa (%esp),%xmm0
- vmovdqa 16(%esp),%xmm1
- vmovdqa 32(%esp),%xmm2
- vpaddd 48(%esp),%xmm3,%xmm3
- movl $10,%edx
- vmovdqa %xmm3,48(%esp)
- jmp .L018loop1x
-.align 16
-.L018loop1x:
- vpaddd %xmm1,%xmm0,%xmm0
- vpxor %xmm0,%xmm3,%xmm3
-.byte 143,232,120,194,219,16
- vpaddd %xmm3,%xmm2,%xmm2
- vpxor %xmm2,%xmm1,%xmm1
-.byte 143,232,120,194,201,12
- vpaddd %xmm1,%xmm0,%xmm0
- vpxor %xmm0,%xmm3,%xmm3
-.byte 143,232,120,194,219,8
- vpaddd %xmm3,%xmm2,%xmm2
- vpxor %xmm2,%xmm1,%xmm1
-.byte 143,232,120,194,201,7
- vpshufd $78,%xmm2,%xmm2
- vpshufd $57,%xmm1,%xmm1
- vpshufd $147,%xmm3,%xmm3
- vpaddd %xmm1,%xmm0,%xmm0
- vpxor %xmm0,%xmm3,%xmm3
-.byte 143,232,120,194,219,16
- vpaddd %xmm3,%xmm2,%xmm2
- vpxor %xmm2,%xmm1,%xmm1
-.byte 143,232,120,194,201,12
- vpaddd %xmm1,%xmm0,%xmm0
- vpxor %xmm0,%xmm3,%xmm3
-.byte 143,232,120,194,219,8
- vpaddd %xmm3,%xmm2,%xmm2
- vpxor %xmm2,%xmm1,%xmm1
-.byte 143,232,120,194,201,7
- vpshufd $78,%xmm2,%xmm2
- vpshufd $147,%xmm1,%xmm1
- vpshufd $57,%xmm3,%xmm3
- decl %edx
- jnz .L018loop1x
- vpaddd (%esp),%xmm0,%xmm0
- vpaddd 16(%esp),%xmm1,%xmm1
- vpaddd 32(%esp),%xmm2,%xmm2
- vpaddd 48(%esp),%xmm3,%xmm3
- cmpl $64,%ecx
- jb .L020tail
- vpxor (%esi),%xmm0,%xmm0
- vpxor 16(%esi),%xmm1,%xmm1
- vpxor 32(%esi),%xmm2,%xmm2
- vpxor 48(%esi),%xmm3,%xmm3
- leal 64(%esi),%esi
- vmovdqu %xmm0,(%edi)
- vmovdqu %xmm1,16(%edi)
- vmovdqu %xmm2,32(%edi)
- vmovdqu %xmm3,48(%edi)
- leal 64(%edi),%edi
- subl $64,%ecx
- jnz .L019outer1x
- jmp .L017done
-.L020tail:
- vmovdqa %xmm0,(%esp)
- vmovdqa %xmm1,16(%esp)
- vmovdqa %xmm2,32(%esp)
- vmovdqa %xmm3,48(%esp)
- xorl %eax,%eax
- xorl %edx,%edx
- xorl %ebp,%ebp
-.L021tail_loop:
- movb (%esp,%ebp,1),%al
- movb (%esi,%ebp,1),%dl
- leal 1(%ebp),%ebp
- xorb %dl,%al
- movb %al,-1(%edi,%ebp,1)
- decl %ecx
- jnz .L021tail_loop
-.L017done:
- vzeroupper
- movl 512(%esp),%esp
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.size ChaCha20_xop,.-.L_ChaCha20_xop_begin
.comm OPENSSL_ia32cap_P,16,4
#else
.text
@@ -1394,8 +914,6 @@ ChaCha20_ssse3:
pushl %esi
pushl %edi
.Lssse3_shortcut:
- testl $2048,4(%ebp)
- jnz .Lxop_shortcut
movl 20(%esp),%edi
movl 24(%esp),%esi
movl 28(%esp),%ecx
@@ -1539,483 +1057,5 @@ ChaCha20_ssse3:
.byte 44,32,67,82,89,80,84,79,71,65,77,83,32,98,121,32
.byte 60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111
.byte 114,103,62,0
-.globl ChaCha20_xop
-.type ChaCha20_xop,@function
-.align 16
-ChaCha20_xop:
-.L_ChaCha20_xop_begin:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
-.Lxop_shortcut:
- movl 20(%esp),%edi
- movl 24(%esp),%esi
- movl 28(%esp),%ecx
- movl 32(%esp),%edx
- movl 36(%esp),%ebx
- vzeroupper
- movl %esp,%ebp
- subl $524,%esp
- andl $-64,%esp
- movl %ebp,512(%esp)
- leal .Lssse3_data-.Lpic_point(%eax),%eax
- vmovdqu (%ebx),%xmm3
- cmpl $256,%ecx
- jb .L0141x
- movl %edx,516(%esp)
- movl %ebx,520(%esp)
- subl $256,%ecx
- leal 384(%esp),%ebp
- vmovdqu (%edx),%xmm7
- vpshufd $0,%xmm3,%xmm0
- vpshufd $85,%xmm3,%xmm1
- vpshufd $170,%xmm3,%xmm2
- vpshufd $255,%xmm3,%xmm3
- vpaddd 48(%eax),%xmm0,%xmm0
- vpshufd $0,%xmm7,%xmm4
- vpshufd $85,%xmm7,%xmm5
- vpsubd 64(%eax),%xmm0,%xmm0
- vpshufd $170,%xmm7,%xmm6
- vpshufd $255,%xmm7,%xmm7
- vmovdqa %xmm0,64(%ebp)
- vmovdqa %xmm1,80(%ebp)
- vmovdqa %xmm2,96(%ebp)
- vmovdqa %xmm3,112(%ebp)
- vmovdqu 16(%edx),%xmm3
- vmovdqa %xmm4,-64(%ebp)
- vmovdqa %xmm5,-48(%ebp)
- vmovdqa %xmm6,-32(%ebp)
- vmovdqa %xmm7,-16(%ebp)
- vmovdqa 32(%eax),%xmm7
- leal 128(%esp),%ebx
- vpshufd $0,%xmm3,%xmm0
- vpshufd $85,%xmm3,%xmm1
- vpshufd $170,%xmm3,%xmm2
- vpshufd $255,%xmm3,%xmm3
- vpshufd $0,%xmm7,%xmm4
- vpshufd $85,%xmm7,%xmm5
- vpshufd $170,%xmm7,%xmm6
- vpshufd $255,%xmm7,%xmm7
- vmovdqa %xmm0,(%ebp)
- vmovdqa %xmm1,16(%ebp)
- vmovdqa %xmm2,32(%ebp)
- vmovdqa %xmm3,48(%ebp)
- vmovdqa %xmm4,-128(%ebp)
- vmovdqa %xmm5,-112(%ebp)
- vmovdqa %xmm6,-96(%ebp)
- vmovdqa %xmm7,-80(%ebp)
- leal 128(%esi),%esi
- leal 128(%edi),%edi
- jmp .L015outer_loop
-.align 32
-.L015outer_loop:
- vmovdqa -112(%ebp),%xmm1
- vmovdqa -96(%ebp),%xmm2
- vmovdqa -80(%ebp),%xmm3
- vmovdqa -48(%ebp),%xmm5
- vmovdqa -32(%ebp),%xmm6
- vmovdqa -16(%ebp),%xmm7
- vmovdqa %xmm1,-112(%ebx)
- vmovdqa %xmm2,-96(%ebx)
- vmovdqa %xmm3,-80(%ebx)
- vmovdqa %xmm5,-48(%ebx)
- vmovdqa %xmm6,-32(%ebx)
- vmovdqa %xmm7,-16(%ebx)
- vmovdqa 32(%ebp),%xmm2
- vmovdqa 48(%ebp),%xmm3
- vmovdqa 64(%ebp),%xmm4
- vmovdqa 80(%ebp),%xmm5
- vmovdqa 96(%ebp),%xmm6
- vmovdqa 112(%ebp),%xmm7
- vpaddd 64(%eax),%xmm4,%xmm4
- vmovdqa %xmm2,32(%ebx)
- vmovdqa %xmm3,48(%ebx)
- vmovdqa %xmm4,64(%ebx)
- vmovdqa %xmm5,80(%ebx)
- vmovdqa %xmm6,96(%ebx)
- vmovdqa %xmm7,112(%ebx)
- vmovdqa %xmm4,64(%ebp)
- vmovdqa -128(%ebp),%xmm0
- vmovdqa %xmm4,%xmm6
- vmovdqa -64(%ebp),%xmm3
- vmovdqa (%ebp),%xmm4
- vmovdqa 16(%ebp),%xmm5
- movl $10,%edx
- nop
-.align 32
-.L016loop:
- vpaddd %xmm3,%xmm0,%xmm0
- vpxor %xmm0,%xmm6,%xmm6
-.byte 143,232,120,194,246,16
- vpaddd %xmm6,%xmm4,%xmm4
- vpxor %xmm4,%xmm3,%xmm2
- vmovdqa -112(%ebx),%xmm1
-.byte 143,232,120,194,210,12
- vmovdqa -48(%ebx),%xmm3
- vpaddd %xmm2,%xmm0,%xmm0
- vmovdqa 80(%ebx),%xmm7
- vpxor %xmm0,%xmm6,%xmm6
- vpaddd %xmm3,%xmm1,%xmm1
-.byte 143,232,120,194,246,8
- vmovdqa %xmm0,-128(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa %xmm6,64(%ebx)
- vpxor %xmm4,%xmm2,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
-.byte 143,232,120,194,210,7
- vmovdqa %xmm4,(%ebx)
-.byte 143,232,120,194,255,16
- vmovdqa %xmm2,-64(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vmovdqa 32(%ebx),%xmm4
- vpxor %xmm5,%xmm3,%xmm3
- vmovdqa -96(%ebx),%xmm0
-.byte 143,232,120,194,219,12
- vmovdqa -32(%ebx),%xmm2
- vpaddd %xmm3,%xmm1,%xmm1
- vmovdqa 96(%ebx),%xmm6
- vpxor %xmm1,%xmm7,%xmm7
- vpaddd %xmm2,%xmm0,%xmm0
-.byte 143,232,120,194,255,8
- vmovdqa %xmm1,-112(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vmovdqa %xmm7,80(%ebx)
- vpxor %xmm5,%xmm3,%xmm3
- vpxor %xmm0,%xmm6,%xmm6
-.byte 143,232,120,194,219,7
- vmovdqa %xmm5,16(%ebx)
-.byte 143,232,120,194,246,16
- vmovdqa %xmm3,-48(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa 48(%ebx),%xmm5
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa -80(%ebx),%xmm1
-.byte 143,232,120,194,210,12
- vmovdqa -16(%ebx),%xmm3
- vpaddd %xmm2,%xmm0,%xmm0
- vmovdqa 112(%ebx),%xmm7
- vpxor %xmm0,%xmm6,%xmm6
- vpaddd %xmm3,%xmm1,%xmm1
-.byte 143,232,120,194,246,8
- vmovdqa %xmm0,-96(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa %xmm6,96(%ebx)
- vpxor %xmm4,%xmm2,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
-.byte 143,232,120,194,210,7
-.byte 143,232,120,194,255,16
- vmovdqa %xmm2,-32(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm5,%xmm3,%xmm3
- vmovdqa -128(%ebx),%xmm0
-.byte 143,232,120,194,219,12
- vmovdqa -48(%ebx),%xmm2
- vpaddd %xmm3,%xmm1,%xmm1
- vpxor %xmm1,%xmm7,%xmm7
- vpaddd %xmm2,%xmm0,%xmm0
-.byte 143,232,120,194,255,8
- vmovdqa %xmm1,-80(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm5,%xmm3,%xmm3
- vpxor %xmm0,%xmm7,%xmm6
-.byte 143,232,120,194,219,7
-.byte 143,232,120,194,246,16
- vmovdqa %xmm3,-16(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa -112(%ebx),%xmm1
-.byte 143,232,120,194,210,12
- vmovdqa -32(%ebx),%xmm3
- vpaddd %xmm2,%xmm0,%xmm0
- vmovdqa 64(%ebx),%xmm7
- vpxor %xmm0,%xmm6,%xmm6
- vpaddd %xmm3,%xmm1,%xmm1
-.byte 143,232,120,194,246,8
- vmovdqa %xmm0,-128(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa %xmm6,112(%ebx)
- vpxor %xmm4,%xmm2,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
-.byte 143,232,120,194,210,7
- vmovdqa %xmm4,32(%ebx)
-.byte 143,232,120,194,255,16
- vmovdqa %xmm2,-48(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vmovdqa (%ebx),%xmm4
- vpxor %xmm5,%xmm3,%xmm3
- vmovdqa -96(%ebx),%xmm0
-.byte 143,232,120,194,219,12
- vmovdqa -16(%ebx),%xmm2
- vpaddd %xmm3,%xmm1,%xmm1
- vmovdqa 80(%ebx),%xmm6
- vpxor %xmm1,%xmm7,%xmm7
- vpaddd %xmm2,%xmm0,%xmm0
-.byte 143,232,120,194,255,8
- vmovdqa %xmm1,-112(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vmovdqa %xmm7,64(%ebx)
- vpxor %xmm5,%xmm3,%xmm3
- vpxor %xmm0,%xmm6,%xmm6
-.byte 143,232,120,194,219,7
- vmovdqa %xmm5,48(%ebx)
-.byte 143,232,120,194,246,16
- vmovdqa %xmm3,-32(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa 16(%ebx),%xmm5
- vpxor %xmm4,%xmm2,%xmm2
- vmovdqa -80(%ebx),%xmm1
-.byte 143,232,120,194,210,12
- vmovdqa -64(%ebx),%xmm3
- vpaddd %xmm2,%xmm0,%xmm0
- vmovdqa 96(%ebx),%xmm7
- vpxor %xmm0,%xmm6,%xmm6
- vpaddd %xmm3,%xmm1,%xmm1
-.byte 143,232,120,194,246,8
- vmovdqa %xmm0,-96(%ebx)
- vpaddd %xmm6,%xmm4,%xmm4
- vmovdqa %xmm6,80(%ebx)
- vpxor %xmm4,%xmm2,%xmm2
- vpxor %xmm1,%xmm7,%xmm7
-.byte 143,232,120,194,210,7
-.byte 143,232,120,194,255,16
- vmovdqa %xmm2,-16(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vpxor %xmm5,%xmm3,%xmm3
- vmovdqa -128(%ebx),%xmm0
-.byte 143,232,120,194,219,12
- vpaddd %xmm3,%xmm1,%xmm1
- vmovdqa 64(%ebx),%xmm6
- vpxor %xmm1,%xmm7,%xmm7
-.byte 143,232,120,194,255,8
- vmovdqa %xmm1,-80(%ebx)
- vpaddd %xmm7,%xmm5,%xmm5
- vmovdqa %xmm7,96(%ebx)
- vpxor %xmm5,%xmm3,%xmm3
-.byte 143,232,120,194,219,7
- decl %edx
- jnz .L016loop
- vmovdqa %xmm3,-64(%ebx)
- vmovdqa %xmm4,(%ebx)
- vmovdqa %xmm5,16(%ebx)
- vmovdqa %xmm6,64(%ebx)
- vmovdqa %xmm7,96(%ebx)
- vmovdqa -112(%ebx),%xmm1
- vmovdqa -96(%ebx),%xmm2
- vmovdqa -80(%ebx),%xmm3
- vpaddd -128(%ebp),%xmm0,%xmm0
- vpaddd -112(%ebp),%xmm1,%xmm1
- vpaddd -96(%ebp),%xmm2,%xmm2
- vpaddd -80(%ebp),%xmm3,%xmm3
- vpunpckldq %xmm1,%xmm0,%xmm6
- vpunpckldq %xmm3,%xmm2,%xmm7
- vpunpckhdq %xmm1,%xmm0,%xmm0
- vpunpckhdq %xmm3,%xmm2,%xmm2
- vpunpcklqdq %xmm7,%xmm6,%xmm1
- vpunpckhqdq %xmm7,%xmm6,%xmm6
- vpunpcklqdq %xmm2,%xmm0,%xmm7
- vpunpckhqdq %xmm2,%xmm0,%xmm3
- vpxor -128(%esi),%xmm1,%xmm4
- vpxor -64(%esi),%xmm6,%xmm5
- vpxor (%esi),%xmm7,%xmm6
- vpxor 64(%esi),%xmm3,%xmm7
- leal 16(%esi),%esi
- vmovdqa -64(%ebx),%xmm0
- vmovdqa -48(%ebx),%xmm1
- vmovdqa -32(%ebx),%xmm2
- vmovdqa -16(%ebx),%xmm3
- vmovdqu %xmm4,-128(%edi)
- vmovdqu %xmm5,-64(%edi)
- vmovdqu %xmm6,(%edi)
- vmovdqu %xmm7,64(%edi)
- leal 16(%edi),%edi
- vpaddd -64(%ebp),%xmm0,%xmm0
- vpaddd -48(%ebp),%xmm1,%xmm1
- vpaddd -32(%ebp),%xmm2,%xmm2
- vpaddd -16(%ebp),%xmm3,%xmm3
- vpunpckldq %xmm1,%xmm0,%xmm6
- vpunpckldq %xmm3,%xmm2,%xmm7
- vpunpckhdq %xmm1,%xmm0,%xmm0
- vpunpckhdq %xmm3,%xmm2,%xmm2
- vpunpcklqdq %xmm7,%xmm6,%xmm1
- vpunpckhqdq %xmm7,%xmm6,%xmm6
- vpunpcklqdq %xmm2,%xmm0,%xmm7
- vpunpckhqdq %xmm2,%xmm0,%xmm3
- vpxor -128(%esi),%xmm1,%xmm4
- vpxor -64(%esi),%xmm6,%xmm5
- vpxor (%esi),%xmm7,%xmm6
- vpxor 64(%esi),%xmm3,%xmm7
- leal 16(%esi),%esi
- vmovdqa (%ebx),%xmm0
- vmovdqa 16(%ebx),%xmm1
- vmovdqa 32(%ebx),%xmm2
- vmovdqa 48(%ebx),%xmm3
- vmovdqu %xmm4,-128(%edi)
- vmovdqu %xmm5,-64(%edi)
- vmovdqu %xmm6,(%edi)
- vmovdqu %xmm7,64(%edi)
- leal 16(%edi),%edi
- vpaddd (%ebp),%xmm0,%xmm0
- vpaddd 16(%ebp),%xmm1,%xmm1
- vpaddd 32(%ebp),%xmm2,%xmm2
- vpaddd 48(%ebp),%xmm3,%xmm3
- vpunpckldq %xmm1,%xmm0,%xmm6
- vpunpckldq %xmm3,%xmm2,%xmm7
- vpunpckhdq %xmm1,%xmm0,%xmm0
- vpunpckhdq %xmm3,%xmm2,%xmm2
- vpunpcklqdq %xmm7,%xmm6,%xmm1
- vpunpckhqdq %xmm7,%xmm6,%xmm6
- vpunpcklqdq %xmm2,%xmm0,%xmm7
- vpunpckhqdq %xmm2,%xmm0,%xmm3
- vpxor -128(%esi),%xmm1,%xmm4
- vpxor -64(%esi),%xmm6,%xmm5
- vpxor (%esi),%xmm7,%xmm6
- vpxor 64(%esi),%xmm3,%xmm7
- leal 16(%esi),%esi
- vmovdqa 64(%ebx),%xmm0
- vmovdqa 80(%ebx),%xmm1
- vmovdqa 96(%ebx),%xmm2
- vmovdqa 112(%ebx),%xmm3
- vmovdqu %xmm4,-128(%edi)
- vmovdqu %xmm5,-64(%edi)
- vmovdqu %xmm6,(%edi)
- vmovdqu %xmm7,64(%edi)
- leal 16(%edi),%edi
- vpaddd 64(%ebp),%xmm0,%xmm0
- vpaddd 80(%ebp),%xmm1,%xmm1
- vpaddd 96(%ebp),%xmm2,%xmm2
- vpaddd 112(%ebp),%xmm3,%xmm3
- vpunpckldq %xmm1,%xmm0,%xmm6
- vpunpckldq %xmm3,%xmm2,%xmm7
- vpunpckhdq %xmm1,%xmm0,%xmm0
- vpunpckhdq %xmm3,%xmm2,%xmm2
- vpunpcklqdq %xmm7,%xmm6,%xmm1
- vpunpckhqdq %xmm7,%xmm6,%xmm6
- vpunpcklqdq %xmm2,%xmm0,%xmm7
- vpunpckhqdq %xmm2,%xmm0,%xmm3
- vpxor -128(%esi),%xmm1,%xmm4
- vpxor -64(%esi),%xmm6,%xmm5
- vpxor (%esi),%xmm7,%xmm6
- vpxor 64(%esi),%xmm3,%xmm7
- leal 208(%esi),%esi
- vmovdqu %xmm4,-128(%edi)
- vmovdqu %xmm5,-64(%edi)
- vmovdqu %xmm6,(%edi)
- vmovdqu %xmm7,64(%edi)
- leal 208(%edi),%edi
- subl $256,%ecx
- jnc .L015outer_loop
- addl $256,%ecx
- jz .L017done
- movl 520(%esp),%ebx
- leal -128(%esi),%esi
- movl 516(%esp),%edx
- leal -128(%edi),%edi
- vmovd 64(%ebp),%xmm2
- vmovdqu (%ebx),%xmm3
- vpaddd 96(%eax),%xmm2,%xmm2
- vpand 112(%eax),%xmm3,%xmm3
- vpor %xmm2,%xmm3,%xmm3
-.L0141x:
- vmovdqa 32(%eax),%xmm0
- vmovdqu (%edx),%xmm1
- vmovdqu 16(%edx),%xmm2
- vmovdqa (%eax),%xmm6
- vmovdqa 16(%eax),%xmm7
- movl %ebp,48(%esp)
- vmovdqa %xmm0,(%esp)
- vmovdqa %xmm1,16(%esp)
- vmovdqa %xmm2,32(%esp)
- vmovdqa %xmm3,48(%esp)
- movl $10,%edx
- jmp .L018loop1x
-.align 16
-.L019outer1x:
- vmovdqa 80(%eax),%xmm3
- vmovdqa (%esp),%xmm0
- vmovdqa 16(%esp),%xmm1
- vmovdqa 32(%esp),%xmm2
- vpaddd 48(%esp),%xmm3,%xmm3
- movl $10,%edx
- vmovdqa %xmm3,48(%esp)
- jmp .L018loop1x
-.align 16
-.L018loop1x:
- vpaddd %xmm1,%xmm0,%xmm0
- vpxor %xmm0,%xmm3,%xmm3
-.byte 143,232,120,194,219,16
- vpaddd %xmm3,%xmm2,%xmm2
- vpxor %xmm2,%xmm1,%xmm1
-.byte 143,232,120,194,201,12
- vpaddd %xmm1,%xmm0,%xmm0
- vpxor %xmm0,%xmm3,%xmm3
-.byte 143,232,120,194,219,8
- vpaddd %xmm3,%xmm2,%xmm2
- vpxor %xmm2,%xmm1,%xmm1
-.byte 143,232,120,194,201,7
- vpshufd $78,%xmm2,%xmm2
- vpshufd $57,%xmm1,%xmm1
- vpshufd $147,%xmm3,%xmm3
- vpaddd %xmm1,%xmm0,%xmm0
- vpxor %xmm0,%xmm3,%xmm3
-.byte 143,232,120,194,219,16
- vpaddd %xmm3,%xmm2,%xmm2
- vpxor %xmm2,%xmm1,%xmm1
-.byte 143,232,120,194,201,12
- vpaddd %xmm1,%xmm0,%xmm0
- vpxor %xmm0,%xmm3,%xmm3
-.byte 143,232,120,194,219,8
- vpaddd %xmm3,%xmm2,%xmm2
- vpxor %xmm2,%xmm1,%xmm1
-.byte 143,232,120,194,201,7
- vpshufd $78,%xmm2,%xmm2
- vpshufd $147,%xmm1,%xmm1
- vpshufd $57,%xmm3,%xmm3
- decl %edx
- jnz .L018loop1x
- vpaddd (%esp),%xmm0,%xmm0
- vpaddd 16(%esp),%xmm1,%xmm1
- vpaddd 32(%esp),%xmm2,%xmm2
- vpaddd 48(%esp),%xmm3,%xmm3
- cmpl $64,%ecx
- jb .L020tail
- vpxor (%esi),%xmm0,%xmm0
- vpxor 16(%esi),%xmm1,%xmm1
- vpxor 32(%esi),%xmm2,%xmm2
- vpxor 48(%esi),%xmm3,%xmm3
- leal 64(%esi),%esi
- vmovdqu %xmm0,(%edi)
- vmovdqu %xmm1,16(%edi)
- vmovdqu %xmm2,32(%edi)
- vmovdqu %xmm3,48(%edi)
- leal 64(%edi),%edi
- subl $64,%ecx
- jnz .L019outer1x
- jmp .L017done
-.L020tail:
- vmovdqa %xmm0,(%esp)
- vmovdqa %xmm1,16(%esp)
- vmovdqa %xmm2,32(%esp)
- vmovdqa %xmm3,48(%esp)
- xorl %eax,%eax
- xorl %edx,%edx
- xorl %ebp,%ebp
-.L021tail_loop:
- movb (%esp,%ebp,1),%al
- movb (%esi,%ebp,1),%dl
- leal 1(%ebp),%ebp
- xorb %dl,%al
- movb %al,-1(%edi,%ebp,1)
- decl %ecx
- jnz .L021tail_loop
-.L017done:
- vzeroupper
- movl 512(%esp),%esp
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.size ChaCha20_xop,.-.L_ChaCha20_xop_begin
.comm OPENSSL_ia32cap_P,16,4
#endif
diff --git a/secure/lib/libcrypto/i386/ecp_nistz256-x86.S b/secure/lib/libcrypto/i386/ecp_nistz256-x86.S
index 7d0c1b9eb9df8..eb413d9f1a736 100644
--- a/secure/lib/libcrypto/i386/ecp_nistz256-x86.S
+++ b/secure/lib/libcrypto/i386/ecp_nistz256-x86.S
@@ -4422,19 +4422,15 @@ ecp_nistz256_point_add:
orl 4(%edi),%eax
orl 8(%edi),%eax
orl 12(%edi),%eax
+ movl 576(%esp),%ebx
+ notl %ebx
+ orl %ebx,%eax
+ movl 580(%esp),%ebx
+ notl %ebx
+ orl %ebx,%eax
+ orl 584(%esp),%eax
.byte 62
jnz .L010add_proceed
- movl 576(%esp),%eax
- andl 580(%esp),%eax
- movl 584(%esp),%ebx
- jz .L010add_proceed
- testl %ebx,%ebx
- jz .L011add_double
- movl 616(%esp),%edi
- xorl %eax,%eax
- movl $24,%ecx
-.byte 252,243,171
- jmp .L012add_done
.align 16
.L011add_double:
movl 620(%esp),%esi
@@ -9590,19 +9586,15 @@ ecp_nistz256_point_add:
orl 4(%edi),%eax
orl 8(%edi),%eax
orl 12(%edi),%eax
+ movl 576(%esp),%ebx
+ notl %ebx
+ orl %ebx,%eax
+ movl 580(%esp),%ebx
+ notl %ebx
+ orl %ebx,%eax
+ orl 584(%esp),%eax
.byte 62
jnz .L010add_proceed
- movl 576(%esp),%eax
- andl 580(%esp),%eax
- movl 584(%esp),%ebx
- jz .L010add_proceed
- testl %ebx,%ebx
- jz .L011add_double
- movl 616(%esp),%edi
- xorl %eax,%eax
- movl $24,%ecx
-.byte 252,243,171
- jmp .L012add_done
.align 16
.L011add_double:
movl 620(%esp),%esi
diff --git a/secure/lib/libcrypto/i386/poly1305-x86.S b/secure/lib/libcrypto/i386/poly1305-x86.S
index 100deee40bf23..b394500278d5d 100644
--- a/secure/lib/libcrypto/i386/poly1305-x86.S
+++ b/secure/lib/libcrypto/i386/poly1305-x86.S
@@ -36,10 +36,6 @@ poly1305_init:
jne .L002no_sse2
leal _poly1305_blocks_sse2-.L001pic_point(%ebx),%eax
leal _poly1305_emit_sse2-.L001pic_point(%ebx),%edx
- movl 8(%edi),%ecx
- testl $32,%ecx
- jz .L002no_sse2
- leal _poly1305_blocks_avx2-.L001pic_point(%ebx),%eax
.L002no_sse2:
movl 20(%esp),%edi
movl %eax,(%ebp)
@@ -1348,557 +1344,6 @@ _poly1305_emit_sse2:
popl %ebp
ret
.size _poly1305_emit_sse2,.-_poly1305_emit_sse2
-.align 32
-.type _poly1305_init_avx2,@function
-.align 16
-_poly1305_init_avx2:
- vmovdqu 24(%edi),%xmm4
- leal 48(%edi),%edi
- movl %esp,%ebp
- subl $224,%esp
- andl $-16,%esp
- vmovdqa 64(%ebx),%xmm7
- vpand %xmm7,%xmm4,%xmm0
- vpsrlq $26,%xmm4,%xmm1
- vpsrldq $6,%xmm4,%xmm3
- vpand %xmm7,%xmm1,%xmm1
- vpsrlq $4,%xmm3,%xmm2
- vpsrlq $30,%xmm3,%xmm3
- vpand %xmm7,%xmm2,%xmm2
- vpand %xmm7,%xmm3,%xmm3
- vpsrldq $13,%xmm4,%xmm4
- leal 144(%esp),%edx
- movl $2,%ecx
-.L018square:
- vmovdqa %xmm0,(%esp)
- vmovdqa %xmm1,16(%esp)
- vmovdqa %xmm2,32(%esp)
- vmovdqa %xmm3,48(%esp)
- vmovdqa %xmm4,64(%esp)
- vpslld $2,%xmm1,%xmm6
- vpslld $2,%xmm2,%xmm5
- vpaddd %xmm1,%xmm6,%xmm6
- vpaddd %xmm2,%xmm5,%xmm5
- vmovdqa %xmm6,80(%esp)
- vmovdqa %xmm5,96(%esp)
- vpslld $2,%xmm3,%xmm6
- vpslld $2,%xmm4,%xmm5
- vpaddd %xmm3,%xmm6,%xmm6
- vpaddd %xmm4,%xmm5,%xmm5
- vmovdqa %xmm6,112(%esp)
- vmovdqa %xmm5,128(%esp)
- vpshufd $68,%xmm0,%xmm5
- vmovdqa %xmm1,%xmm6
- vpshufd $68,%xmm1,%xmm1
- vpshufd $68,%xmm2,%xmm2
- vpshufd $68,%xmm3,%xmm3
- vpshufd $68,%xmm4,%xmm4
- vmovdqa %xmm5,(%edx)
- vmovdqa %xmm1,16(%edx)
- vmovdqa %xmm2,32(%edx)
- vmovdqa %xmm3,48(%edx)
- vmovdqa %xmm4,64(%edx)
- vpmuludq %xmm0,%xmm4,%xmm4
- vpmuludq %xmm0,%xmm3,%xmm3
- vpmuludq %xmm0,%xmm2,%xmm2
- vpmuludq %xmm0,%xmm1,%xmm1
- vpmuludq %xmm0,%xmm5,%xmm0
- vpmuludq 48(%edx),%xmm6,%xmm5
- vpaddq %xmm5,%xmm4,%xmm4
- vpmuludq 32(%edx),%xmm6,%xmm7
- vpaddq %xmm7,%xmm3,%xmm3
- vpmuludq 16(%edx),%xmm6,%xmm5
- vpaddq %xmm5,%xmm2,%xmm2
- vmovdqa 80(%esp),%xmm7
- vpmuludq (%edx),%xmm6,%xmm6
- vpaddq %xmm6,%xmm1,%xmm1
- vmovdqa 32(%esp),%xmm5
- vpmuludq 64(%edx),%xmm7,%xmm7
- vpaddq %xmm7,%xmm0,%xmm0
- vpmuludq 32(%edx),%xmm5,%xmm6
- vpaddq %xmm6,%xmm4,%xmm4
- vpmuludq 16(%edx),%xmm5,%xmm7
- vpaddq %xmm7,%xmm3,%xmm3
- vmovdqa 96(%esp),%xmm6
- vpmuludq (%edx),%xmm5,%xmm5
- vpaddq %xmm5,%xmm2,%xmm2
- vpmuludq 64(%edx),%xmm6,%xmm7
- vpaddq %xmm7,%xmm1,%xmm1
- vmovdqa 48(%esp),%xmm5
- vpmuludq 48(%edx),%xmm6,%xmm6
- vpaddq %xmm6,%xmm0,%xmm0
- vpmuludq 16(%edx),%xmm5,%xmm7
- vpaddq %xmm7,%xmm4,%xmm4
- vmovdqa 112(%esp),%xmm6
- vpmuludq (%edx),%xmm5,%xmm5
- vpaddq %xmm5,%xmm3,%xmm3
- vpmuludq 64(%edx),%xmm6,%xmm7
- vpaddq %xmm7,%xmm2,%xmm2
- vpmuludq 48(%edx),%xmm6,%xmm5
- vpaddq %xmm5,%xmm1,%xmm1
- vmovdqa 64(%esp),%xmm7
- vpmuludq 32(%edx),%xmm6,%xmm6
- vpaddq %xmm6,%xmm0,%xmm0
- vmovdqa 128(%esp),%xmm5
- vpmuludq (%edx),%xmm7,%xmm7
- vpaddq %xmm7,%xmm4,%xmm4
- vpmuludq 64(%edx),%xmm5,%xmm6
- vpaddq %xmm6,%xmm3,%xmm3
- vpmuludq 16(%edx),%xmm5,%xmm7
- vpaddq %xmm7,%xmm0,%xmm0
- vpmuludq 32(%edx),%xmm5,%xmm6
- vpaddq %xmm6,%xmm1,%xmm1
- vmovdqa 64(%ebx),%xmm7
- vpmuludq 48(%edx),%xmm5,%xmm5
- vpaddq %xmm5,%xmm2,%xmm2
- vpsrlq $26,%xmm3,%xmm5
- vpand %xmm7,%xmm3,%xmm3
- vpsrlq $26,%xmm0,%xmm6
- vpand %xmm7,%xmm0,%xmm0
- vpaddq %xmm5,%xmm4,%xmm4
- vpaddq %xmm6,%xmm1,%xmm1
- vpsrlq $26,%xmm4,%xmm5
- vpand %xmm7,%xmm4,%xmm4
- vpsrlq $26,%xmm1,%xmm6
- vpand %xmm7,%xmm1,%xmm1
- vpaddq %xmm6,%xmm2,%xmm2
- vpaddd %xmm5,%xmm0,%xmm0
- vpsllq $2,%xmm5,%xmm5
- vpsrlq $26,%xmm2,%xmm6
- vpand %xmm7,%xmm2,%xmm2
- vpaddd %xmm5,%xmm0,%xmm0
- vpaddd %xmm6,%xmm3,%xmm3
- vpsrlq $26,%xmm3,%xmm6
- vpsrlq $26,%xmm0,%xmm5
- vpand %xmm7,%xmm0,%xmm0
- vpand %xmm7,%xmm3,%xmm3
- vpaddd %xmm5,%xmm1,%xmm1
- vpaddd %xmm6,%xmm4,%xmm4
- decl %ecx
- jz .L019square_break
- vpunpcklqdq (%esp),%xmm0,%xmm0
- vpunpcklqdq 16(%esp),%xmm1,%xmm1
- vpunpcklqdq 32(%esp),%xmm2,%xmm2
- vpunpcklqdq 48(%esp),%xmm3,%xmm3
- vpunpcklqdq 64(%esp),%xmm4,%xmm4
- jmp .L018square
-.L019square_break:
- vpsllq $32,%xmm0,%xmm0
- vpsllq $32,%xmm1,%xmm1
- vpsllq $32,%xmm2,%xmm2
- vpsllq $32,%xmm3,%xmm3
- vpsllq $32,%xmm4,%xmm4
- vpor (%esp),%xmm0,%xmm0
- vpor 16(%esp),%xmm1,%xmm1
- vpor 32(%esp),%xmm2,%xmm2
- vpor 48(%esp),%xmm3,%xmm3
- vpor 64(%esp),%xmm4,%xmm4
- vpshufd $141,%xmm0,%xmm0
- vpshufd $141,%xmm1,%xmm1
- vpshufd $141,%xmm2,%xmm2
- vpshufd $141,%xmm3,%xmm3
- vpshufd $141,%xmm4,%xmm4
- vmovdqu %xmm0,(%edi)
- vmovdqu %xmm1,16(%edi)
- vmovdqu %xmm2,32(%edi)
- vmovdqu %xmm3,48(%edi)
- vmovdqu %xmm4,64(%edi)
- vpslld $2,%xmm1,%xmm6
- vpslld $2,%xmm2,%xmm5
- vpaddd %xmm1,%xmm6,%xmm6
- vpaddd %xmm2,%xmm5,%xmm5
- vmovdqu %xmm6,80(%edi)
- vmovdqu %xmm5,96(%edi)
- vpslld $2,%xmm3,%xmm6
- vpslld $2,%xmm4,%xmm5
- vpaddd %xmm3,%xmm6,%xmm6
- vpaddd %xmm4,%xmm5,%xmm5
- vmovdqu %xmm6,112(%edi)
- vmovdqu %xmm5,128(%edi)
- movl %ebp,%esp
- leal -48(%edi),%edi
- ret
-.size _poly1305_init_avx2,.-_poly1305_init_avx2
-.align 32
-.type _poly1305_blocks_avx2,@function
-.align 16
-_poly1305_blocks_avx2:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
- movl 20(%esp),%edi
- movl 24(%esp),%esi
- movl 28(%esp),%ecx
- movl 20(%edi),%eax
- andl $-16,%ecx
- jz .L020nodata
- cmpl $64,%ecx
- jae .L021enter_avx2
- testl %eax,%eax
- jz .Lenter_blocks
-.L021enter_avx2:
- vzeroupper
- call .L022pic_point
-.L022pic_point:
- popl %ebx
- leal .Lconst_sse2-.L022pic_point(%ebx),%ebx
- testl %eax,%eax
- jnz .L023base2_26
- call _poly1305_init_avx2
- movl (%edi),%eax
- movl 3(%edi),%ecx
- movl 6(%edi),%edx
- movl 9(%edi),%esi
- movl 13(%edi),%ebp
- shrl $2,%ecx
- andl $67108863,%eax
- shrl $4,%edx
- andl $67108863,%ecx
- shrl $6,%esi
- andl $67108863,%edx
- movl %eax,(%edi)
- movl %ecx,4(%edi)
- movl %edx,8(%edi)
- movl %esi,12(%edi)
- movl %ebp,16(%edi)
- movl $1,20(%edi)
- movl 24(%esp),%esi
- movl 28(%esp),%ecx
-.L023base2_26:
- movl 32(%esp),%eax
- movl %esp,%ebp
- subl $448,%esp
- andl $-512,%esp
- vmovdqu 48(%edi),%xmm0
- leal 288(%esp),%edx
- vmovdqu 64(%edi),%xmm1
- vmovdqu 80(%edi),%xmm2
- vmovdqu 96(%edi),%xmm3
- vmovdqu 112(%edi),%xmm4
- leal 48(%edi),%edi
- vpermq $64,%ymm0,%ymm0
- vpermq $64,%ymm1,%ymm1
- vpermq $64,%ymm2,%ymm2
- vpermq $64,%ymm3,%ymm3
- vpermq $64,%ymm4,%ymm4
- vpshufd $200,%ymm0,%ymm0
- vpshufd $200,%ymm1,%ymm1
- vpshufd $200,%ymm2,%ymm2
- vpshufd $200,%ymm3,%ymm3
- vpshufd $200,%ymm4,%ymm4
- vmovdqa %ymm0,-128(%edx)
- vmovdqu 80(%edi),%xmm0
- vmovdqa %ymm1,-96(%edx)
- vmovdqu 96(%edi),%xmm1
- vmovdqa %ymm2,-64(%edx)
- vmovdqu 112(%edi),%xmm2
- vmovdqa %ymm3,-32(%edx)
- vmovdqu 128(%edi),%xmm3
- vmovdqa %ymm4,(%edx)
- vpermq $64,%ymm0,%ymm0
- vpermq $64,%ymm1,%ymm1
- vpermq $64,%ymm2,%ymm2
- vpermq $64,%ymm3,%ymm3
- vpshufd $200,%ymm0,%ymm0
- vpshufd $200,%ymm1,%ymm1
- vpshufd $200,%ymm2,%ymm2
- vpshufd $200,%ymm3,%ymm3
- vmovdqa %ymm0,32(%edx)
- vmovd -48(%edi),%xmm0
- vmovdqa %ymm1,64(%edx)
- vmovd -44(%edi),%xmm1
- vmovdqa %ymm2,96(%edx)
- vmovd -40(%edi),%xmm2
- vmovdqa %ymm3,128(%edx)
- vmovd -36(%edi),%xmm3
- vmovd -32(%edi),%xmm4
- vmovdqa 64(%ebx),%ymm7
- negl %eax
- testl $63,%ecx
- jz .L024even
- movl %ecx,%edx
- andl $-64,%ecx
- andl $63,%edx
- vmovdqu (%esi),%xmm5
- cmpl $32,%edx
- jb .L025one
- vmovdqu 16(%esi),%xmm6
- je .L026two
- vinserti128 $1,32(%esi),%ymm5,%ymm5
- leal 48(%esi),%esi
- leal 8(%ebx),%ebx
- leal 296(%esp),%edx
- jmp .L027tail
-.L026two:
- leal 32(%esi),%esi
- leal 16(%ebx),%ebx
- leal 304(%esp),%edx
- jmp .L027tail
-.L025one:
- leal 16(%esi),%esi
- vpxor %ymm6,%ymm6,%ymm6
- leal 32(%ebx,%eax,8),%ebx
- leal 312(%esp),%edx
- jmp .L027tail
-.align 32
-.L024even:
- vmovdqu (%esi),%xmm5
- vmovdqu 16(%esi),%xmm6
- vinserti128 $1,32(%esi),%ymm5,%ymm5
- vinserti128 $1,48(%esi),%ymm6,%ymm6
- leal 64(%esi),%esi
- subl $64,%ecx
- jz .L027tail
-.L028loop:
- vmovdqa %ymm2,64(%esp)
- vpsrldq $6,%ymm5,%ymm2
- vmovdqa %ymm0,(%esp)
- vpsrldq $6,%ymm6,%ymm0
- vmovdqa %ymm1,32(%esp)
- vpunpckhqdq %ymm6,%ymm5,%ymm1
- vpunpcklqdq %ymm6,%ymm5,%ymm5
- vpunpcklqdq %ymm0,%ymm2,%ymm2
- vpsrlq $30,%ymm2,%ymm0
- vpsrlq $4,%ymm2,%ymm2
- vpsrlq $26,%ymm5,%ymm6
- vpsrlq $40,%ymm1,%ymm1
- vpand %ymm7,%ymm2,%ymm2
- vpand %ymm7,%ymm5,%ymm5
- vpand %ymm7,%ymm6,%ymm6
- vpand %ymm7,%ymm0,%ymm0
- vpor (%ebx),%ymm1,%ymm1
- vpaddq 64(%esp),%ymm2,%ymm2
- vpaddq (%esp),%ymm5,%ymm5
- vpaddq 32(%esp),%ymm6,%ymm6
- vpaddq %ymm3,%ymm0,%ymm0
- vpaddq %ymm4,%ymm1,%ymm1
- vpmuludq -96(%edx),%ymm2,%ymm3
- vmovdqa %ymm6,32(%esp)
- vpmuludq -64(%edx),%ymm2,%ymm4
- vmovdqa %ymm0,96(%esp)
- vpmuludq 96(%edx),%ymm2,%ymm0
- vmovdqa %ymm1,128(%esp)
- vpmuludq 128(%edx),%ymm2,%ymm1
- vpmuludq -128(%edx),%ymm2,%ymm2
- vpmuludq -32(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm3,%ymm3
- vpmuludq (%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm4,%ymm4
- vpmuludq -128(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm0,%ymm0
- vmovdqa 32(%esp),%ymm7
- vpmuludq -96(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm1,%ymm1
- vpmuludq -64(%edx),%ymm5,%ymm5
- vpaddq %ymm5,%ymm2,%ymm2
- vpmuludq -64(%edx),%ymm7,%ymm6
- vpaddq %ymm6,%ymm3,%ymm3
- vpmuludq -32(%edx),%ymm7,%ymm5
- vpaddq %ymm5,%ymm4,%ymm4
- vpmuludq 128(%edx),%ymm7,%ymm6
- vpaddq %ymm6,%ymm0,%ymm0
- vmovdqa 96(%esp),%ymm6
- vpmuludq -128(%edx),%ymm7,%ymm5
- vpaddq %ymm5,%ymm1,%ymm1
- vpmuludq -96(%edx),%ymm7,%ymm7
- vpaddq %ymm7,%ymm2,%ymm2
- vpmuludq -128(%edx),%ymm6,%ymm5
- vpaddq %ymm5,%ymm3,%ymm3
- vpmuludq -96(%edx),%ymm6,%ymm7
- vpaddq %ymm7,%ymm4,%ymm4
- vpmuludq 64(%edx),%ymm6,%ymm5
- vpaddq %ymm5,%ymm0,%ymm0
- vmovdqa 128(%esp),%ymm5
- vpmuludq 96(%edx),%ymm6,%ymm7
- vpaddq %ymm7,%ymm1,%ymm1
- vpmuludq 128(%edx),%ymm6,%ymm6
- vpaddq %ymm6,%ymm2,%ymm2
- vpmuludq 128(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm3,%ymm3
- vpmuludq 32(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm0,%ymm0
- vpmuludq -128(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm4,%ymm4
- vmovdqa 64(%ebx),%ymm7
- vpmuludq 64(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm1,%ymm1
- vpmuludq 96(%edx),%ymm5,%ymm5
- vpaddq %ymm5,%ymm2,%ymm2
- vpsrlq $26,%ymm3,%ymm5
- vpand %ymm7,%ymm3,%ymm3
- vpsrlq $26,%ymm0,%ymm6
- vpand %ymm7,%ymm0,%ymm0
- vpaddq %ymm5,%ymm4,%ymm4
- vpaddq %ymm6,%ymm1,%ymm1
- vpsrlq $26,%ymm4,%ymm5
- vpand %ymm7,%ymm4,%ymm4
- vpsrlq $26,%ymm1,%ymm6
- vpand %ymm7,%ymm1,%ymm1
- vpaddq %ymm6,%ymm2,%ymm2
- vpaddq %ymm5,%ymm0,%ymm0
- vpsllq $2,%ymm5,%ymm5
- vpsrlq $26,%ymm2,%ymm6
- vpand %ymm7,%ymm2,%ymm2
- vpaddq %ymm5,%ymm0,%ymm0
- vpaddq %ymm6,%ymm3,%ymm3
- vpsrlq $26,%ymm3,%ymm6
- vpsrlq $26,%ymm0,%ymm5
- vpand %ymm7,%ymm0,%ymm0
- vpand %ymm7,%ymm3,%ymm3
- vpaddq %ymm5,%ymm1,%ymm1
- vpaddq %ymm6,%ymm4,%ymm4
- vmovdqu (%esi),%xmm5
- vmovdqu 16(%esi),%xmm6
- vinserti128 $1,32(%esi),%ymm5,%ymm5
- vinserti128 $1,48(%esi),%ymm6,%ymm6
- leal 64(%esi),%esi
- subl $64,%ecx
- jnz .L028loop
-.L027tail:
- vmovdqa %ymm2,64(%esp)
- vpsrldq $6,%ymm5,%ymm2
- vmovdqa %ymm0,(%esp)
- vpsrldq $6,%ymm6,%ymm0
- vmovdqa %ymm1,32(%esp)
- vpunpckhqdq %ymm6,%ymm5,%ymm1
- vpunpcklqdq %ymm6,%ymm5,%ymm5
- vpunpcklqdq %ymm0,%ymm2,%ymm2
- vpsrlq $30,%ymm2,%ymm0
- vpsrlq $4,%ymm2,%ymm2
- vpsrlq $26,%ymm5,%ymm6
- vpsrlq $40,%ymm1,%ymm1
- vpand %ymm7,%ymm2,%ymm2
- vpand %ymm7,%ymm5,%ymm5
- vpand %ymm7,%ymm6,%ymm6
- vpand %ymm7,%ymm0,%ymm0
- vpor (%ebx),%ymm1,%ymm1
- andl $-64,%ebx
- vpaddq 64(%esp),%ymm2,%ymm2
- vpaddq (%esp),%ymm5,%ymm5
- vpaddq 32(%esp),%ymm6,%ymm6
- vpaddq %ymm3,%ymm0,%ymm0
- vpaddq %ymm4,%ymm1,%ymm1
- vpmuludq -92(%edx),%ymm2,%ymm3
- vmovdqa %ymm6,32(%esp)
- vpmuludq -60(%edx),%ymm2,%ymm4
- vmovdqa %ymm0,96(%esp)
- vpmuludq 100(%edx),%ymm2,%ymm0
- vmovdqa %ymm1,128(%esp)
- vpmuludq 132(%edx),%ymm2,%ymm1
- vpmuludq -124(%edx),%ymm2,%ymm2
- vpmuludq -28(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm3,%ymm3
- vpmuludq 4(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm4,%ymm4
- vpmuludq -124(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm0,%ymm0
- vmovdqa 32(%esp),%ymm7
- vpmuludq -92(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm1,%ymm1
- vpmuludq -60(%edx),%ymm5,%ymm5
- vpaddq %ymm5,%ymm2,%ymm2
- vpmuludq -60(%edx),%ymm7,%ymm6
- vpaddq %ymm6,%ymm3,%ymm3
- vpmuludq -28(%edx),%ymm7,%ymm5
- vpaddq %ymm5,%ymm4,%ymm4
- vpmuludq 132(%edx),%ymm7,%ymm6
- vpaddq %ymm6,%ymm0,%ymm0
- vmovdqa 96(%esp),%ymm6
- vpmuludq -124(%edx),%ymm7,%ymm5
- vpaddq %ymm5,%ymm1,%ymm1
- vpmuludq -92(%edx),%ymm7,%ymm7
- vpaddq %ymm7,%ymm2,%ymm2
- vpmuludq -124(%edx),%ymm6,%ymm5
- vpaddq %ymm5,%ymm3,%ymm3
- vpmuludq -92(%edx),%ymm6,%ymm7
- vpaddq %ymm7,%ymm4,%ymm4
- vpmuludq 68(%edx),%ymm6,%ymm5
- vpaddq %ymm5,%ymm0,%ymm0
- vmovdqa 128(%esp),%ymm5
- vpmuludq 100(%edx),%ymm6,%ymm7
- vpaddq %ymm7,%ymm1,%ymm1
- vpmuludq 132(%edx),%ymm6,%ymm6
- vpaddq %ymm6,%ymm2,%ymm2
- vpmuludq 132(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm3,%ymm3
- vpmuludq 36(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm0,%ymm0
- vpmuludq -124(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm4,%ymm4
- vmovdqa 64(%ebx),%ymm7
- vpmuludq 68(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm1,%ymm1
- vpmuludq 100(%edx),%ymm5,%ymm5
- vpaddq %ymm5,%ymm2,%ymm2
- vpsrldq $8,%ymm4,%ymm5
- vpsrldq $8,%ymm3,%ymm6
- vpaddq %ymm5,%ymm4,%ymm4
- vpsrldq $8,%ymm0,%ymm5
- vpaddq %ymm6,%ymm3,%ymm3
- vpsrldq $8,%ymm1,%ymm6
- vpaddq %ymm5,%ymm0,%ymm0
- vpsrldq $8,%ymm2,%ymm5
- vpaddq %ymm6,%ymm1,%ymm1
- vpermq $2,%ymm4,%ymm6
- vpaddq %ymm5,%ymm2,%ymm2
- vpermq $2,%ymm3,%ymm5
- vpaddq %ymm6,%ymm4,%ymm4
- vpermq $2,%ymm0,%ymm6
- vpaddq %ymm5,%ymm3,%ymm3
- vpermq $2,%ymm1,%ymm5
- vpaddq %ymm6,%ymm0,%ymm0
- vpermq $2,%ymm2,%ymm6
- vpaddq %ymm5,%ymm1,%ymm1
- vpaddq %ymm6,%ymm2,%ymm2
- vpsrlq $26,%ymm3,%ymm5
- vpand %ymm7,%ymm3,%ymm3
- vpsrlq $26,%ymm0,%ymm6
- vpand %ymm7,%ymm0,%ymm0
- vpaddq %ymm5,%ymm4,%ymm4
- vpaddq %ymm6,%ymm1,%ymm1
- vpsrlq $26,%ymm4,%ymm5
- vpand %ymm7,%ymm4,%ymm4
- vpsrlq $26,%ymm1,%ymm6
- vpand %ymm7,%ymm1,%ymm1
- vpaddq %ymm6,%ymm2,%ymm2
- vpaddq %ymm5,%ymm0,%ymm0
- vpsllq $2,%ymm5,%ymm5
- vpsrlq $26,%ymm2,%ymm6
- vpand %ymm7,%ymm2,%ymm2
- vpaddq %ymm5,%ymm0,%ymm0
- vpaddq %ymm6,%ymm3,%ymm3
- vpsrlq $26,%ymm3,%ymm6
- vpsrlq $26,%ymm0,%ymm5
- vpand %ymm7,%ymm0,%ymm0
- vpand %ymm7,%ymm3,%ymm3
- vpaddq %ymm5,%ymm1,%ymm1
- vpaddq %ymm6,%ymm4,%ymm4
- cmpl $0,%ecx
- je .L029done
- vpshufd $252,%xmm0,%xmm0
- leal 288(%esp),%edx
- vpshufd $252,%xmm1,%xmm1
- vpshufd $252,%xmm2,%xmm2
- vpshufd $252,%xmm3,%xmm3
- vpshufd $252,%xmm4,%xmm4
- jmp .L024even
-.align 16
-.L029done:
- vmovd %xmm0,-48(%edi)
- vmovd %xmm1,-44(%edi)
- vmovd %xmm2,-40(%edi)
- vmovd %xmm3,-36(%edi)
- vmovd %xmm4,-32(%edi)
- vzeroupper
- movl %ebp,%esp
-.L020nodata:
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.size _poly1305_blocks_avx2,.-_poly1305_blocks_avx2
.align 64
.Lconst_sse2:
.long 16777216,0,16777216,0,16777216,0,16777216,0
@@ -1947,10 +1392,6 @@ poly1305_init:
jne .L002no_sse2
leal _poly1305_blocks_sse2-.L001pic_point(%ebx),%eax
leal _poly1305_emit_sse2-.L001pic_point(%ebx),%edx
- movl 8(%edi),%ecx
- testl $32,%ecx
- jz .L002no_sse2
- leal _poly1305_blocks_avx2-.L001pic_point(%ebx),%eax
.L002no_sse2:
movl 20(%esp),%edi
movl %eax,(%ebp)
@@ -3259,557 +2700,6 @@ _poly1305_emit_sse2:
popl %ebp
ret
.size _poly1305_emit_sse2,.-_poly1305_emit_sse2
-.align 32
-.type _poly1305_init_avx2,@function
-.align 16
-_poly1305_init_avx2:
- vmovdqu 24(%edi),%xmm4
- leal 48(%edi),%edi
- movl %esp,%ebp
- subl $224,%esp
- andl $-16,%esp
- vmovdqa 64(%ebx),%xmm7
- vpand %xmm7,%xmm4,%xmm0
- vpsrlq $26,%xmm4,%xmm1
- vpsrldq $6,%xmm4,%xmm3
- vpand %xmm7,%xmm1,%xmm1
- vpsrlq $4,%xmm3,%xmm2
- vpsrlq $30,%xmm3,%xmm3
- vpand %xmm7,%xmm2,%xmm2
- vpand %xmm7,%xmm3,%xmm3
- vpsrldq $13,%xmm4,%xmm4
- leal 144(%esp),%edx
- movl $2,%ecx
-.L018square:
- vmovdqa %xmm0,(%esp)
- vmovdqa %xmm1,16(%esp)
- vmovdqa %xmm2,32(%esp)
- vmovdqa %xmm3,48(%esp)
- vmovdqa %xmm4,64(%esp)
- vpslld $2,%xmm1,%xmm6
- vpslld $2,%xmm2,%xmm5
- vpaddd %xmm1,%xmm6,%xmm6
- vpaddd %xmm2,%xmm5,%xmm5
- vmovdqa %xmm6,80(%esp)
- vmovdqa %xmm5,96(%esp)
- vpslld $2,%xmm3,%xmm6
- vpslld $2,%xmm4,%xmm5
- vpaddd %xmm3,%xmm6,%xmm6
- vpaddd %xmm4,%xmm5,%xmm5
- vmovdqa %xmm6,112(%esp)
- vmovdqa %xmm5,128(%esp)
- vpshufd $68,%xmm0,%xmm5
- vmovdqa %xmm1,%xmm6
- vpshufd $68,%xmm1,%xmm1
- vpshufd $68,%xmm2,%xmm2
- vpshufd $68,%xmm3,%xmm3
- vpshufd $68,%xmm4,%xmm4
- vmovdqa %xmm5,(%edx)
- vmovdqa %xmm1,16(%edx)
- vmovdqa %xmm2,32(%edx)
- vmovdqa %xmm3,48(%edx)
- vmovdqa %xmm4,64(%edx)
- vpmuludq %xmm0,%xmm4,%xmm4
- vpmuludq %xmm0,%xmm3,%xmm3
- vpmuludq %xmm0,%xmm2,%xmm2
- vpmuludq %xmm0,%xmm1,%xmm1
- vpmuludq %xmm0,%xmm5,%xmm0
- vpmuludq 48(%edx),%xmm6,%xmm5
- vpaddq %xmm5,%xmm4,%xmm4
- vpmuludq 32(%edx),%xmm6,%xmm7
- vpaddq %xmm7,%xmm3,%xmm3
- vpmuludq 16(%edx),%xmm6,%xmm5
- vpaddq %xmm5,%xmm2,%xmm2
- vmovdqa 80(%esp),%xmm7
- vpmuludq (%edx),%xmm6,%xmm6
- vpaddq %xmm6,%xmm1,%xmm1
- vmovdqa 32(%esp),%xmm5
- vpmuludq 64(%edx),%xmm7,%xmm7
- vpaddq %xmm7,%xmm0,%xmm0
- vpmuludq 32(%edx),%xmm5,%xmm6
- vpaddq %xmm6,%xmm4,%xmm4
- vpmuludq 16(%edx),%xmm5,%xmm7
- vpaddq %xmm7,%xmm3,%xmm3
- vmovdqa 96(%esp),%xmm6
- vpmuludq (%edx),%xmm5,%xmm5
- vpaddq %xmm5,%xmm2,%xmm2
- vpmuludq 64(%edx),%xmm6,%xmm7
- vpaddq %xmm7,%xmm1,%xmm1
- vmovdqa 48(%esp),%xmm5
- vpmuludq 48(%edx),%xmm6,%xmm6
- vpaddq %xmm6,%xmm0,%xmm0
- vpmuludq 16(%edx),%xmm5,%xmm7
- vpaddq %xmm7,%xmm4,%xmm4
- vmovdqa 112(%esp),%xmm6
- vpmuludq (%edx),%xmm5,%xmm5
- vpaddq %xmm5,%xmm3,%xmm3
- vpmuludq 64(%edx),%xmm6,%xmm7
- vpaddq %xmm7,%xmm2,%xmm2
- vpmuludq 48(%edx),%xmm6,%xmm5
- vpaddq %xmm5,%xmm1,%xmm1
- vmovdqa 64(%esp),%xmm7
- vpmuludq 32(%edx),%xmm6,%xmm6
- vpaddq %xmm6,%xmm0,%xmm0
- vmovdqa 128(%esp),%xmm5
- vpmuludq (%edx),%xmm7,%xmm7
- vpaddq %xmm7,%xmm4,%xmm4
- vpmuludq 64(%edx),%xmm5,%xmm6
- vpaddq %xmm6,%xmm3,%xmm3
- vpmuludq 16(%edx),%xmm5,%xmm7
- vpaddq %xmm7,%xmm0,%xmm0
- vpmuludq 32(%edx),%xmm5,%xmm6
- vpaddq %xmm6,%xmm1,%xmm1
- vmovdqa 64(%ebx),%xmm7
- vpmuludq 48(%edx),%xmm5,%xmm5
- vpaddq %xmm5,%xmm2,%xmm2
- vpsrlq $26,%xmm3,%xmm5
- vpand %xmm7,%xmm3,%xmm3
- vpsrlq $26,%xmm0,%xmm6
- vpand %xmm7,%xmm0,%xmm0
- vpaddq %xmm5,%xmm4,%xmm4
- vpaddq %xmm6,%xmm1,%xmm1
- vpsrlq $26,%xmm4,%xmm5
- vpand %xmm7,%xmm4,%xmm4
- vpsrlq $26,%xmm1,%xmm6
- vpand %xmm7,%xmm1,%xmm1
- vpaddq %xmm6,%xmm2,%xmm2
- vpaddd %xmm5,%xmm0,%xmm0
- vpsllq $2,%xmm5,%xmm5
- vpsrlq $26,%xmm2,%xmm6
- vpand %xmm7,%xmm2,%xmm2
- vpaddd %xmm5,%xmm0,%xmm0
- vpaddd %xmm6,%xmm3,%xmm3
- vpsrlq $26,%xmm3,%xmm6
- vpsrlq $26,%xmm0,%xmm5
- vpand %xmm7,%xmm0,%xmm0
- vpand %xmm7,%xmm3,%xmm3
- vpaddd %xmm5,%xmm1,%xmm1
- vpaddd %xmm6,%xmm4,%xmm4
- decl %ecx
- jz .L019square_break
- vpunpcklqdq (%esp),%xmm0,%xmm0
- vpunpcklqdq 16(%esp),%xmm1,%xmm1
- vpunpcklqdq 32(%esp),%xmm2,%xmm2
- vpunpcklqdq 48(%esp),%xmm3,%xmm3
- vpunpcklqdq 64(%esp),%xmm4,%xmm4
- jmp .L018square
-.L019square_break:
- vpsllq $32,%xmm0,%xmm0
- vpsllq $32,%xmm1,%xmm1
- vpsllq $32,%xmm2,%xmm2
- vpsllq $32,%xmm3,%xmm3
- vpsllq $32,%xmm4,%xmm4
- vpor (%esp),%xmm0,%xmm0
- vpor 16(%esp),%xmm1,%xmm1
- vpor 32(%esp),%xmm2,%xmm2
- vpor 48(%esp),%xmm3,%xmm3
- vpor 64(%esp),%xmm4,%xmm4
- vpshufd $141,%xmm0,%xmm0
- vpshufd $141,%xmm1,%xmm1
- vpshufd $141,%xmm2,%xmm2
- vpshufd $141,%xmm3,%xmm3
- vpshufd $141,%xmm4,%xmm4
- vmovdqu %xmm0,(%edi)
- vmovdqu %xmm1,16(%edi)
- vmovdqu %xmm2,32(%edi)
- vmovdqu %xmm3,48(%edi)
- vmovdqu %xmm4,64(%edi)
- vpslld $2,%xmm1,%xmm6
- vpslld $2,%xmm2,%xmm5
- vpaddd %xmm1,%xmm6,%xmm6
- vpaddd %xmm2,%xmm5,%xmm5
- vmovdqu %xmm6,80(%edi)
- vmovdqu %xmm5,96(%edi)
- vpslld $2,%xmm3,%xmm6
- vpslld $2,%xmm4,%xmm5
- vpaddd %xmm3,%xmm6,%xmm6
- vpaddd %xmm4,%xmm5,%xmm5
- vmovdqu %xmm6,112(%edi)
- vmovdqu %xmm5,128(%edi)
- movl %ebp,%esp
- leal -48(%edi),%edi
- ret
-.size _poly1305_init_avx2,.-_poly1305_init_avx2
-.align 32
-.type _poly1305_blocks_avx2,@function
-.align 16
-_poly1305_blocks_avx2:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
- movl 20(%esp),%edi
- movl 24(%esp),%esi
- movl 28(%esp),%ecx
- movl 20(%edi),%eax
- andl $-16,%ecx
- jz .L020nodata
- cmpl $64,%ecx
- jae .L021enter_avx2
- testl %eax,%eax
- jz .Lenter_blocks
-.L021enter_avx2:
- vzeroupper
- call .L022pic_point
-.L022pic_point:
- popl %ebx
- leal .Lconst_sse2-.L022pic_point(%ebx),%ebx
- testl %eax,%eax
- jnz .L023base2_26
- call _poly1305_init_avx2
- movl (%edi),%eax
- movl 3(%edi),%ecx
- movl 6(%edi),%edx
- movl 9(%edi),%esi
- movl 13(%edi),%ebp
- shrl $2,%ecx
- andl $67108863,%eax
- shrl $4,%edx
- andl $67108863,%ecx
- shrl $6,%esi
- andl $67108863,%edx
- movl %eax,(%edi)
- movl %ecx,4(%edi)
- movl %edx,8(%edi)
- movl %esi,12(%edi)
- movl %ebp,16(%edi)
- movl $1,20(%edi)
- movl 24(%esp),%esi
- movl 28(%esp),%ecx
-.L023base2_26:
- movl 32(%esp),%eax
- movl %esp,%ebp
- subl $448,%esp
- andl $-512,%esp
- vmovdqu 48(%edi),%xmm0
- leal 288(%esp),%edx
- vmovdqu 64(%edi),%xmm1
- vmovdqu 80(%edi),%xmm2
- vmovdqu 96(%edi),%xmm3
- vmovdqu 112(%edi),%xmm4
- leal 48(%edi),%edi
- vpermq $64,%ymm0,%ymm0
- vpermq $64,%ymm1,%ymm1
- vpermq $64,%ymm2,%ymm2
- vpermq $64,%ymm3,%ymm3
- vpermq $64,%ymm4,%ymm4
- vpshufd $200,%ymm0,%ymm0
- vpshufd $200,%ymm1,%ymm1
- vpshufd $200,%ymm2,%ymm2
- vpshufd $200,%ymm3,%ymm3
- vpshufd $200,%ymm4,%ymm4
- vmovdqa %ymm0,-128(%edx)
- vmovdqu 80(%edi),%xmm0
- vmovdqa %ymm1,-96(%edx)
- vmovdqu 96(%edi),%xmm1
- vmovdqa %ymm2,-64(%edx)
- vmovdqu 112(%edi),%xmm2
- vmovdqa %ymm3,-32(%edx)
- vmovdqu 128(%edi),%xmm3
- vmovdqa %ymm4,(%edx)
- vpermq $64,%ymm0,%ymm0
- vpermq $64,%ymm1,%ymm1
- vpermq $64,%ymm2,%ymm2
- vpermq $64,%ymm3,%ymm3
- vpshufd $200,%ymm0,%ymm0
- vpshufd $200,%ymm1,%ymm1
- vpshufd $200,%ymm2,%ymm2
- vpshufd $200,%ymm3,%ymm3
- vmovdqa %ymm0,32(%edx)
- vmovd -48(%edi),%xmm0
- vmovdqa %ymm1,64(%edx)
- vmovd -44(%edi),%xmm1
- vmovdqa %ymm2,96(%edx)
- vmovd -40(%edi),%xmm2
- vmovdqa %ymm3,128(%edx)
- vmovd -36(%edi),%xmm3
- vmovd -32(%edi),%xmm4
- vmovdqa 64(%ebx),%ymm7
- negl %eax
- testl $63,%ecx
- jz .L024even
- movl %ecx,%edx
- andl $-64,%ecx
- andl $63,%edx
- vmovdqu (%esi),%xmm5
- cmpl $32,%edx
- jb .L025one
- vmovdqu 16(%esi),%xmm6
- je .L026two
- vinserti128 $1,32(%esi),%ymm5,%ymm5
- leal 48(%esi),%esi
- leal 8(%ebx),%ebx
- leal 296(%esp),%edx
- jmp .L027tail
-.L026two:
- leal 32(%esi),%esi
- leal 16(%ebx),%ebx
- leal 304(%esp),%edx
- jmp .L027tail
-.L025one:
- leal 16(%esi),%esi
- vpxor %ymm6,%ymm6,%ymm6
- leal 32(%ebx,%eax,8),%ebx
- leal 312(%esp),%edx
- jmp .L027tail
-.align 32
-.L024even:
- vmovdqu (%esi),%xmm5
- vmovdqu 16(%esi),%xmm6
- vinserti128 $1,32(%esi),%ymm5,%ymm5
- vinserti128 $1,48(%esi),%ymm6,%ymm6
- leal 64(%esi),%esi
- subl $64,%ecx
- jz .L027tail
-.L028loop:
- vmovdqa %ymm2,64(%esp)
- vpsrldq $6,%ymm5,%ymm2
- vmovdqa %ymm0,(%esp)
- vpsrldq $6,%ymm6,%ymm0
- vmovdqa %ymm1,32(%esp)
- vpunpckhqdq %ymm6,%ymm5,%ymm1
- vpunpcklqdq %ymm6,%ymm5,%ymm5
- vpunpcklqdq %ymm0,%ymm2,%ymm2
- vpsrlq $30,%ymm2,%ymm0
- vpsrlq $4,%ymm2,%ymm2
- vpsrlq $26,%ymm5,%ymm6
- vpsrlq $40,%ymm1,%ymm1
- vpand %ymm7,%ymm2,%ymm2
- vpand %ymm7,%ymm5,%ymm5
- vpand %ymm7,%ymm6,%ymm6
- vpand %ymm7,%ymm0,%ymm0
- vpor (%ebx),%ymm1,%ymm1
- vpaddq 64(%esp),%ymm2,%ymm2
- vpaddq (%esp),%ymm5,%ymm5
- vpaddq 32(%esp),%ymm6,%ymm6
- vpaddq %ymm3,%ymm0,%ymm0
- vpaddq %ymm4,%ymm1,%ymm1
- vpmuludq -96(%edx),%ymm2,%ymm3
- vmovdqa %ymm6,32(%esp)
- vpmuludq -64(%edx),%ymm2,%ymm4
- vmovdqa %ymm0,96(%esp)
- vpmuludq 96(%edx),%ymm2,%ymm0
- vmovdqa %ymm1,128(%esp)
- vpmuludq 128(%edx),%ymm2,%ymm1
- vpmuludq -128(%edx),%ymm2,%ymm2
- vpmuludq -32(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm3,%ymm3
- vpmuludq (%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm4,%ymm4
- vpmuludq -128(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm0,%ymm0
- vmovdqa 32(%esp),%ymm7
- vpmuludq -96(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm1,%ymm1
- vpmuludq -64(%edx),%ymm5,%ymm5
- vpaddq %ymm5,%ymm2,%ymm2
- vpmuludq -64(%edx),%ymm7,%ymm6
- vpaddq %ymm6,%ymm3,%ymm3
- vpmuludq -32(%edx),%ymm7,%ymm5
- vpaddq %ymm5,%ymm4,%ymm4
- vpmuludq 128(%edx),%ymm7,%ymm6
- vpaddq %ymm6,%ymm0,%ymm0
- vmovdqa 96(%esp),%ymm6
- vpmuludq -128(%edx),%ymm7,%ymm5
- vpaddq %ymm5,%ymm1,%ymm1
- vpmuludq -96(%edx),%ymm7,%ymm7
- vpaddq %ymm7,%ymm2,%ymm2
- vpmuludq -128(%edx),%ymm6,%ymm5
- vpaddq %ymm5,%ymm3,%ymm3
- vpmuludq -96(%edx),%ymm6,%ymm7
- vpaddq %ymm7,%ymm4,%ymm4
- vpmuludq 64(%edx),%ymm6,%ymm5
- vpaddq %ymm5,%ymm0,%ymm0
- vmovdqa 128(%esp),%ymm5
- vpmuludq 96(%edx),%ymm6,%ymm7
- vpaddq %ymm7,%ymm1,%ymm1
- vpmuludq 128(%edx),%ymm6,%ymm6
- vpaddq %ymm6,%ymm2,%ymm2
- vpmuludq 128(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm3,%ymm3
- vpmuludq 32(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm0,%ymm0
- vpmuludq -128(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm4,%ymm4
- vmovdqa 64(%ebx),%ymm7
- vpmuludq 64(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm1,%ymm1
- vpmuludq 96(%edx),%ymm5,%ymm5
- vpaddq %ymm5,%ymm2,%ymm2
- vpsrlq $26,%ymm3,%ymm5
- vpand %ymm7,%ymm3,%ymm3
- vpsrlq $26,%ymm0,%ymm6
- vpand %ymm7,%ymm0,%ymm0
- vpaddq %ymm5,%ymm4,%ymm4
- vpaddq %ymm6,%ymm1,%ymm1
- vpsrlq $26,%ymm4,%ymm5
- vpand %ymm7,%ymm4,%ymm4
- vpsrlq $26,%ymm1,%ymm6
- vpand %ymm7,%ymm1,%ymm1
- vpaddq %ymm6,%ymm2,%ymm2
- vpaddq %ymm5,%ymm0,%ymm0
- vpsllq $2,%ymm5,%ymm5
- vpsrlq $26,%ymm2,%ymm6
- vpand %ymm7,%ymm2,%ymm2
- vpaddq %ymm5,%ymm0,%ymm0
- vpaddq %ymm6,%ymm3,%ymm3
- vpsrlq $26,%ymm3,%ymm6
- vpsrlq $26,%ymm0,%ymm5
- vpand %ymm7,%ymm0,%ymm0
- vpand %ymm7,%ymm3,%ymm3
- vpaddq %ymm5,%ymm1,%ymm1
- vpaddq %ymm6,%ymm4,%ymm4
- vmovdqu (%esi),%xmm5
- vmovdqu 16(%esi),%xmm6
- vinserti128 $1,32(%esi),%ymm5,%ymm5
- vinserti128 $1,48(%esi),%ymm6,%ymm6
- leal 64(%esi),%esi
- subl $64,%ecx
- jnz .L028loop
-.L027tail:
- vmovdqa %ymm2,64(%esp)
- vpsrldq $6,%ymm5,%ymm2
- vmovdqa %ymm0,(%esp)
- vpsrldq $6,%ymm6,%ymm0
- vmovdqa %ymm1,32(%esp)
- vpunpckhqdq %ymm6,%ymm5,%ymm1
- vpunpcklqdq %ymm6,%ymm5,%ymm5
- vpunpcklqdq %ymm0,%ymm2,%ymm2
- vpsrlq $30,%ymm2,%ymm0
- vpsrlq $4,%ymm2,%ymm2
- vpsrlq $26,%ymm5,%ymm6
- vpsrlq $40,%ymm1,%ymm1
- vpand %ymm7,%ymm2,%ymm2
- vpand %ymm7,%ymm5,%ymm5
- vpand %ymm7,%ymm6,%ymm6
- vpand %ymm7,%ymm0,%ymm0
- vpor (%ebx),%ymm1,%ymm1
- andl $-64,%ebx
- vpaddq 64(%esp),%ymm2,%ymm2
- vpaddq (%esp),%ymm5,%ymm5
- vpaddq 32(%esp),%ymm6,%ymm6
- vpaddq %ymm3,%ymm0,%ymm0
- vpaddq %ymm4,%ymm1,%ymm1
- vpmuludq -92(%edx),%ymm2,%ymm3
- vmovdqa %ymm6,32(%esp)
- vpmuludq -60(%edx),%ymm2,%ymm4
- vmovdqa %ymm0,96(%esp)
- vpmuludq 100(%edx),%ymm2,%ymm0
- vmovdqa %ymm1,128(%esp)
- vpmuludq 132(%edx),%ymm2,%ymm1
- vpmuludq -124(%edx),%ymm2,%ymm2
- vpmuludq -28(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm3,%ymm3
- vpmuludq 4(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm4,%ymm4
- vpmuludq -124(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm0,%ymm0
- vmovdqa 32(%esp),%ymm7
- vpmuludq -92(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm1,%ymm1
- vpmuludq -60(%edx),%ymm5,%ymm5
- vpaddq %ymm5,%ymm2,%ymm2
- vpmuludq -60(%edx),%ymm7,%ymm6
- vpaddq %ymm6,%ymm3,%ymm3
- vpmuludq -28(%edx),%ymm7,%ymm5
- vpaddq %ymm5,%ymm4,%ymm4
- vpmuludq 132(%edx),%ymm7,%ymm6
- vpaddq %ymm6,%ymm0,%ymm0
- vmovdqa 96(%esp),%ymm6
- vpmuludq -124(%edx),%ymm7,%ymm5
- vpaddq %ymm5,%ymm1,%ymm1
- vpmuludq -92(%edx),%ymm7,%ymm7
- vpaddq %ymm7,%ymm2,%ymm2
- vpmuludq -124(%edx),%ymm6,%ymm5
- vpaddq %ymm5,%ymm3,%ymm3
- vpmuludq -92(%edx),%ymm6,%ymm7
- vpaddq %ymm7,%ymm4,%ymm4
- vpmuludq 68(%edx),%ymm6,%ymm5
- vpaddq %ymm5,%ymm0,%ymm0
- vmovdqa 128(%esp),%ymm5
- vpmuludq 100(%edx),%ymm6,%ymm7
- vpaddq %ymm7,%ymm1,%ymm1
- vpmuludq 132(%edx),%ymm6,%ymm6
- vpaddq %ymm6,%ymm2,%ymm2
- vpmuludq 132(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm3,%ymm3
- vpmuludq 36(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm0,%ymm0
- vpmuludq -124(%edx),%ymm5,%ymm7
- vpaddq %ymm7,%ymm4,%ymm4
- vmovdqa 64(%ebx),%ymm7
- vpmuludq 68(%edx),%ymm5,%ymm6
- vpaddq %ymm6,%ymm1,%ymm1
- vpmuludq 100(%edx),%ymm5,%ymm5
- vpaddq %ymm5,%ymm2,%ymm2
- vpsrldq $8,%ymm4,%ymm5
- vpsrldq $8,%ymm3,%ymm6
- vpaddq %ymm5,%ymm4,%ymm4
- vpsrldq $8,%ymm0,%ymm5
- vpaddq %ymm6,%ymm3,%ymm3
- vpsrldq $8,%ymm1,%ymm6
- vpaddq %ymm5,%ymm0,%ymm0
- vpsrldq $8,%ymm2,%ymm5
- vpaddq %ymm6,%ymm1,%ymm1
- vpermq $2,%ymm4,%ymm6
- vpaddq %ymm5,%ymm2,%ymm2
- vpermq $2,%ymm3,%ymm5
- vpaddq %ymm6,%ymm4,%ymm4
- vpermq $2,%ymm0,%ymm6
- vpaddq %ymm5,%ymm3,%ymm3
- vpermq $2,%ymm1,%ymm5
- vpaddq %ymm6,%ymm0,%ymm0
- vpermq $2,%ymm2,%ymm6
- vpaddq %ymm5,%ymm1,%ymm1
- vpaddq %ymm6,%ymm2,%ymm2
- vpsrlq $26,%ymm3,%ymm5
- vpand %ymm7,%ymm3,%ymm3
- vpsrlq $26,%ymm0,%ymm6
- vpand %ymm7,%ymm0,%ymm0
- vpaddq %ymm5,%ymm4,%ymm4
- vpaddq %ymm6,%ymm1,%ymm1
- vpsrlq $26,%ymm4,%ymm5
- vpand %ymm7,%ymm4,%ymm4
- vpsrlq $26,%ymm1,%ymm6
- vpand %ymm7,%ymm1,%ymm1
- vpaddq %ymm6,%ymm2,%ymm2
- vpaddq %ymm5,%ymm0,%ymm0
- vpsllq $2,%ymm5,%ymm5
- vpsrlq $26,%ymm2,%ymm6
- vpand %ymm7,%ymm2,%ymm2
- vpaddq %ymm5,%ymm0,%ymm0
- vpaddq %ymm6,%ymm3,%ymm3
- vpsrlq $26,%ymm3,%ymm6
- vpsrlq $26,%ymm0,%ymm5
- vpand %ymm7,%ymm0,%ymm0
- vpand %ymm7,%ymm3,%ymm3
- vpaddq %ymm5,%ymm1,%ymm1
- vpaddq %ymm6,%ymm4,%ymm4
- cmpl $0,%ecx
- je .L029done
- vpshufd $252,%xmm0,%xmm0
- leal 288(%esp),%edx
- vpshufd $252,%xmm1,%xmm1
- vpshufd $252,%xmm2,%xmm2
- vpshufd $252,%xmm3,%xmm3
- vpshufd $252,%xmm4,%xmm4
- jmp .L024even
-.align 16
-.L029done:
- vmovd %xmm0,-48(%edi)
- vmovd %xmm1,-44(%edi)
- vmovd %xmm2,-40(%edi)
- vmovd %xmm3,-36(%edi)
- vmovd %xmm4,-32(%edi)
- vzeroupper
- movl %ebp,%esp
-.L020nodata:
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.size _poly1305_blocks_avx2,.-_poly1305_blocks_avx2
.align 64
.Lconst_sse2:
.long 16777216,0,16777216,0,16777216,0,16777216,0
diff --git a/secure/lib/libcrypto/i386/sha1-586.S b/secure/lib/libcrypto/i386/sha1-586.S
index 7e90e2d9b1d29..49e7482b81613 100644
--- a/secure/lib/libcrypto/i386/sha1-586.S
+++ b/secure/lib/libcrypto/i386/sha1-586.S
@@ -25,11 +25,6 @@ sha1_block_data_order:
jz .L001x86
testl $536870912,%ecx
jnz .Lshaext_shortcut
- andl $268435456,%edx
- andl $1073741824,%eax
- orl %edx,%eax
- cmpl $1342177280,%eax
- je .Lavx_shortcut
jmp .Lssse3_shortcut
.align 16
.L001x86:
@@ -2787,1176 +2782,6 @@ _sha1_block_data_order_ssse3:
popl %ebp
ret
.size _sha1_block_data_order_ssse3,.-_sha1_block_data_order_ssse3
-.type _sha1_block_data_order_avx,@function
-.align 16
-_sha1_block_data_order_avx:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
- call .L008pic_point
-.L008pic_point:
- popl %ebp
- leal .LK_XX_XX-.L008pic_point(%ebp),%ebp
-.Lavx_shortcut:
- vzeroall
- vmovdqa (%ebp),%xmm7
- vmovdqa 16(%ebp),%xmm0
- vmovdqa 32(%ebp),%xmm1
- vmovdqa 48(%ebp),%xmm2
- vmovdqa 64(%ebp),%xmm6
- movl 20(%esp),%edi
- movl 24(%esp),%ebp
- movl 28(%esp),%edx
- movl %esp,%esi
- subl $208,%esp
- andl $-64,%esp
- vmovdqa %xmm0,112(%esp)
- vmovdqa %xmm1,128(%esp)
- vmovdqa %xmm2,144(%esp)
- shll $6,%edx
- vmovdqa %xmm7,160(%esp)
- addl %ebp,%edx
- vmovdqa %xmm6,176(%esp)
- addl $64,%ebp
- movl %edi,192(%esp)
- movl %ebp,196(%esp)
- movl %edx,200(%esp)
- movl %esi,204(%esp)
- movl (%edi),%eax
- movl 4(%edi),%ebx
- movl 8(%edi),%ecx
- movl 12(%edi),%edx
- movl 16(%edi),%edi
- movl %ebx,%esi
- vmovdqu -64(%ebp),%xmm0
- vmovdqu -48(%ebp),%xmm1
- vmovdqu -32(%ebp),%xmm2
- vmovdqu -16(%ebp),%xmm3
- vpshufb %xmm6,%xmm0,%xmm0
- vpshufb %xmm6,%xmm1,%xmm1
- vpshufb %xmm6,%xmm2,%xmm2
- vmovdqa %xmm7,96(%esp)
- vpshufb %xmm6,%xmm3,%xmm3
- vpaddd %xmm7,%xmm0,%xmm4
- vpaddd %xmm7,%xmm1,%xmm5
- vpaddd %xmm7,%xmm2,%xmm6
- vmovdqa %xmm4,(%esp)
- movl %ecx,%ebp
- vmovdqa %xmm5,16(%esp)
- xorl %edx,%ebp
- vmovdqa %xmm6,32(%esp)
- andl %ebp,%esi
- jmp .L009loop
-.align 16
-.L009loop:
- shrdl $2,%ebx,%ebx
- xorl %edx,%esi
- vpalignr $8,%xmm0,%xmm1,%xmm4
- movl %eax,%ebp
- addl (%esp),%edi
- vpaddd %xmm3,%xmm7,%xmm7
- vmovdqa %xmm0,64(%esp)
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpsrldq $4,%xmm3,%xmm6
- addl %esi,%edi
- andl %ebx,%ebp
- vpxor %xmm0,%xmm4,%xmm4
- xorl %ecx,%ebx
- addl %eax,%edi
- vpxor %xmm2,%xmm6,%xmm6
- shrdl $7,%eax,%eax
- xorl %ecx,%ebp
- vmovdqa %xmm7,48(%esp)
- movl %edi,%esi
- addl 4(%esp),%edx
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ebx,%eax
- shldl $5,%edi,%edi
- addl %ebp,%edx
- andl %eax,%esi
- vpsrld $31,%xmm4,%xmm6
- xorl %ebx,%eax
- addl %edi,%edx
- shrdl $7,%edi,%edi
- xorl %ebx,%esi
- vpslldq $12,%xmm4,%xmm0
- vpaddd %xmm4,%xmm4,%xmm4
- movl %edx,%ebp
- addl 8(%esp),%ecx
- xorl %eax,%edi
- shldl $5,%edx,%edx
- vpsrld $30,%xmm0,%xmm7
- vpor %xmm6,%xmm4,%xmm4
- addl %esi,%ecx
- andl %edi,%ebp
- xorl %eax,%edi
- addl %edx,%ecx
- vpslld $2,%xmm0,%xmm0
- shrdl $7,%edx,%edx
- xorl %eax,%ebp
- vpxor %xmm7,%xmm4,%xmm4
- movl %ecx,%esi
- addl 12(%esp),%ebx
- xorl %edi,%edx
- shldl $5,%ecx,%ecx
- vpxor %xmm0,%xmm4,%xmm4
- addl %ebp,%ebx
- andl %edx,%esi
- vmovdqa 96(%esp),%xmm0
- xorl %edi,%edx
- addl %ecx,%ebx
- shrdl $7,%ecx,%ecx
- xorl %edi,%esi
- vpalignr $8,%xmm1,%xmm2,%xmm5
- movl %ebx,%ebp
- addl 16(%esp),%eax
- vpaddd %xmm4,%xmm0,%xmm0
- vmovdqa %xmm1,80(%esp)
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vpsrldq $4,%xmm4,%xmm7
- addl %esi,%eax
- andl %ecx,%ebp
- vpxor %xmm1,%xmm5,%xmm5
- xorl %edx,%ecx
- addl %ebx,%eax
- vpxor %xmm3,%xmm7,%xmm7
- shrdl $7,%ebx,%ebx
- xorl %edx,%ebp
- vmovdqa %xmm0,(%esp)
- movl %eax,%esi
- addl 20(%esp),%edi
- vpxor %xmm7,%xmm5,%xmm5
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- addl %ebp,%edi
- andl %ebx,%esi
- vpsrld $31,%xmm5,%xmm7
- xorl %ecx,%ebx
- addl %eax,%edi
- shrdl $7,%eax,%eax
- xorl %ecx,%esi
- vpslldq $12,%xmm5,%xmm1
- vpaddd %xmm5,%xmm5,%xmm5
- movl %edi,%ebp
- addl 24(%esp),%edx
- xorl %ebx,%eax
- shldl $5,%edi,%edi
- vpsrld $30,%xmm1,%xmm0
- vpor %xmm7,%xmm5,%xmm5
- addl %esi,%edx
- andl %eax,%ebp
- xorl %ebx,%eax
- addl %edi,%edx
- vpslld $2,%xmm1,%xmm1
- shrdl $7,%edi,%edi
- xorl %ebx,%ebp
- vpxor %xmm0,%xmm5,%xmm5
- movl %edx,%esi
- addl 28(%esp),%ecx
- xorl %eax,%edi
- shldl $5,%edx,%edx
- vpxor %xmm1,%xmm5,%xmm5
- addl %ebp,%ecx
- andl %edi,%esi
- vmovdqa 112(%esp),%xmm1
- xorl %eax,%edi
- addl %edx,%ecx
- shrdl $7,%edx,%edx
- xorl %eax,%esi
- vpalignr $8,%xmm2,%xmm3,%xmm6
- movl %ecx,%ebp
- addl 32(%esp),%ebx
- vpaddd %xmm5,%xmm1,%xmm1
- vmovdqa %xmm2,96(%esp)
- xorl %edi,%edx
- shldl $5,%ecx,%ecx
- vpsrldq $4,%xmm5,%xmm0
- addl %esi,%ebx
- andl %edx,%ebp
- vpxor %xmm2,%xmm6,%xmm6
- xorl %edi,%edx
- addl %ecx,%ebx
- vpxor %xmm4,%xmm0,%xmm0
- shrdl $7,%ecx,%ecx
- xorl %edi,%ebp
- vmovdqa %xmm1,16(%esp)
- movl %ebx,%esi
- addl 36(%esp),%eax
- vpxor %xmm0,%xmm6,%xmm6
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- andl %ecx,%esi
- vpsrld $31,%xmm6,%xmm0
- xorl %edx,%ecx
- addl %ebx,%eax
- shrdl $7,%ebx,%ebx
- xorl %edx,%esi
- vpslldq $12,%xmm6,%xmm2
- vpaddd %xmm6,%xmm6,%xmm6
- movl %eax,%ebp
- addl 40(%esp),%edi
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpsrld $30,%xmm2,%xmm1
- vpor %xmm0,%xmm6,%xmm6
- addl %esi,%edi
- andl %ebx,%ebp
- xorl %ecx,%ebx
- addl %eax,%edi
- vpslld $2,%xmm2,%xmm2
- vmovdqa 64(%esp),%xmm0
- shrdl $7,%eax,%eax
- xorl %ecx,%ebp
- vpxor %xmm1,%xmm6,%xmm6
- movl %edi,%esi
- addl 44(%esp),%edx
- xorl %ebx,%eax
- shldl $5,%edi,%edi
- vpxor %xmm2,%xmm6,%xmm6
- addl %ebp,%edx
- andl %eax,%esi
- vmovdqa 112(%esp),%xmm2
- xorl %ebx,%eax
- addl %edi,%edx
- shrdl $7,%edi,%edi
- xorl %ebx,%esi
- vpalignr $8,%xmm3,%xmm4,%xmm7
- movl %edx,%ebp
- addl 48(%esp),%ecx
- vpaddd %xmm6,%xmm2,%xmm2
- vmovdqa %xmm3,64(%esp)
- xorl %eax,%edi
- shldl $5,%edx,%edx
- vpsrldq $4,%xmm6,%xmm1
- addl %esi,%ecx
- andl %edi,%ebp
- vpxor %xmm3,%xmm7,%xmm7
- xorl %eax,%edi
- addl %edx,%ecx
- vpxor %xmm5,%xmm1,%xmm1
- shrdl $7,%edx,%edx
- xorl %eax,%ebp
- vmovdqa %xmm2,32(%esp)
- movl %ecx,%esi
- addl 52(%esp),%ebx
- vpxor %xmm1,%xmm7,%xmm7
- xorl %edi,%edx
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- andl %edx,%esi
- vpsrld $31,%xmm7,%xmm1
- xorl %edi,%edx
- addl %ecx,%ebx
- shrdl $7,%ecx,%ecx
- xorl %edi,%esi
- vpslldq $12,%xmm7,%xmm3
- vpaddd %xmm7,%xmm7,%xmm7
- movl %ebx,%ebp
- addl 56(%esp),%eax
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vpsrld $30,%xmm3,%xmm2
- vpor %xmm1,%xmm7,%xmm7
- addl %esi,%eax
- andl %ecx,%ebp
- xorl %edx,%ecx
- addl %ebx,%eax
- vpslld $2,%xmm3,%xmm3
- vmovdqa 80(%esp),%xmm1
- shrdl $7,%ebx,%ebx
- xorl %edx,%ebp
- vpxor %xmm2,%xmm7,%xmm7
- movl %eax,%esi
- addl 60(%esp),%edi
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpxor %xmm3,%xmm7,%xmm7
- addl %ebp,%edi
- andl %ebx,%esi
- vmovdqa 112(%esp),%xmm3
- xorl %ecx,%ebx
- addl %eax,%edi
- vpalignr $8,%xmm6,%xmm7,%xmm2
- vpxor %xmm4,%xmm0,%xmm0
- shrdl $7,%eax,%eax
- xorl %ecx,%esi
- movl %edi,%ebp
- addl (%esp),%edx
- vpxor %xmm1,%xmm0,%xmm0
- vmovdqa %xmm4,80(%esp)
- xorl %ebx,%eax
- shldl $5,%edi,%edi
- vmovdqa %xmm3,%xmm4
- vpaddd %xmm7,%xmm3,%xmm3
- addl %esi,%edx
- andl %eax,%ebp
- vpxor %xmm2,%xmm0,%xmm0
- xorl %ebx,%eax
- addl %edi,%edx
- shrdl $7,%edi,%edi
- xorl %ebx,%ebp
- vpsrld $30,%xmm0,%xmm2
- vmovdqa %xmm3,48(%esp)
- movl %edx,%esi
- addl 4(%esp),%ecx
- xorl %eax,%edi
- shldl $5,%edx,%edx
- vpslld $2,%xmm0,%xmm0
- addl %ebp,%ecx
- andl %edi,%esi
- xorl %eax,%edi
- addl %edx,%ecx
- shrdl $7,%edx,%edx
- xorl %eax,%esi
- movl %ecx,%ebp
- addl 8(%esp),%ebx
- vpor %xmm2,%xmm0,%xmm0
- xorl %edi,%edx
- shldl $5,%ecx,%ecx
- vmovdqa 96(%esp),%xmm2
- addl %esi,%ebx
- andl %edx,%ebp
- xorl %edi,%edx
- addl %ecx,%ebx
- addl 12(%esp),%eax
- xorl %edi,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpalignr $8,%xmm7,%xmm0,%xmm3
- vpxor %xmm5,%xmm1,%xmm1
- addl 16(%esp),%edi
- xorl %ecx,%esi
- movl %eax,%ebp
- shldl $5,%eax,%eax
- vpxor %xmm2,%xmm1,%xmm1
- vmovdqa %xmm5,96(%esp)
- addl %esi,%edi
- xorl %ecx,%ebp
- vmovdqa %xmm4,%xmm5
- vpaddd %xmm0,%xmm4,%xmm4
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- vpxor %xmm3,%xmm1,%xmm1
- addl 20(%esp),%edx
- xorl %ebx,%ebp
- movl %edi,%esi
- shldl $5,%edi,%edi
- vpsrld $30,%xmm1,%xmm3
- vmovdqa %xmm4,(%esp)
- addl %ebp,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vpslld $2,%xmm1,%xmm1
- addl 24(%esp),%ecx
- xorl %eax,%esi
- movl %edx,%ebp
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%ebp
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vpor %xmm3,%xmm1,%xmm1
- addl 28(%esp),%ebx
- xorl %edi,%ebp
- vmovdqa 64(%esp),%xmm3
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edi,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpalignr $8,%xmm0,%xmm1,%xmm4
- vpxor %xmm6,%xmm2,%xmm2
- addl 32(%esp),%eax
- xorl %edx,%esi
- movl %ebx,%ebp
- shldl $5,%ebx,%ebx
- vpxor %xmm3,%xmm2,%xmm2
- vmovdqa %xmm6,64(%esp)
- addl %esi,%eax
- xorl %edx,%ebp
- vmovdqa 128(%esp),%xmm6
- vpaddd %xmm1,%xmm5,%xmm5
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpxor %xmm4,%xmm2,%xmm2
- addl 36(%esp),%edi
- xorl %ecx,%ebp
- movl %eax,%esi
- shldl $5,%eax,%eax
- vpsrld $30,%xmm2,%xmm4
- vmovdqa %xmm5,16(%esp)
- addl %ebp,%edi
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- vpslld $2,%xmm2,%xmm2
- addl 40(%esp),%edx
- xorl %ebx,%esi
- movl %edi,%ebp
- shldl $5,%edi,%edi
- addl %esi,%edx
- xorl %ebx,%ebp
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vpor %xmm4,%xmm2,%xmm2
- addl 44(%esp),%ecx
- xorl %eax,%ebp
- vmovdqa 80(%esp),%xmm4
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %eax,%esi
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vpalignr $8,%xmm1,%xmm2,%xmm5
- vpxor %xmm7,%xmm3,%xmm3
- addl 48(%esp),%ebx
- xorl %edi,%esi
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- vpxor %xmm4,%xmm3,%xmm3
- vmovdqa %xmm7,80(%esp)
- addl %esi,%ebx
- xorl %edi,%ebp
- vmovdqa %xmm6,%xmm7
- vpaddd %xmm2,%xmm6,%xmm6
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpxor %xmm5,%xmm3,%xmm3
- addl 52(%esp),%eax
- xorl %edx,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- vpsrld $30,%xmm3,%xmm5
- vmovdqa %xmm6,32(%esp)
- addl %ebp,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpslld $2,%xmm3,%xmm3
- addl 56(%esp),%edi
- xorl %ecx,%esi
- movl %eax,%ebp
- shldl $5,%eax,%eax
- addl %esi,%edi
- xorl %ecx,%ebp
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- vpor %xmm5,%xmm3,%xmm3
- addl 60(%esp),%edx
- xorl %ebx,%ebp
- vmovdqa 96(%esp),%xmm5
- movl %edi,%esi
- shldl $5,%edi,%edi
- addl %ebp,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vpalignr $8,%xmm2,%xmm3,%xmm6
- vpxor %xmm0,%xmm4,%xmm4
- addl (%esp),%ecx
- xorl %eax,%esi
- movl %edx,%ebp
- shldl $5,%edx,%edx
- vpxor %xmm5,%xmm4,%xmm4
- vmovdqa %xmm0,96(%esp)
- addl %esi,%ecx
- xorl %eax,%ebp
- vmovdqa %xmm7,%xmm0
- vpaddd %xmm3,%xmm7,%xmm7
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vpxor %xmm6,%xmm4,%xmm4
- addl 4(%esp),%ebx
- xorl %edi,%ebp
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- vpsrld $30,%xmm4,%xmm6
- vmovdqa %xmm7,48(%esp)
- addl %ebp,%ebx
- xorl %edi,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpslld $2,%xmm4,%xmm4
- addl 8(%esp),%eax
- xorl %edx,%esi
- movl %ebx,%ebp
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%ebp
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpor %xmm6,%xmm4,%xmm4
- addl 12(%esp),%edi
- xorl %ecx,%ebp
- vmovdqa 64(%esp),%xmm6
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- vpalignr $8,%xmm3,%xmm4,%xmm7
- vpxor %xmm1,%xmm5,%xmm5
- addl 16(%esp),%edx
- xorl %ebx,%esi
- movl %edi,%ebp
- shldl $5,%edi,%edi
- vpxor %xmm6,%xmm5,%xmm5
- vmovdqa %xmm1,64(%esp)
- addl %esi,%edx
- xorl %ebx,%ebp
- vmovdqa %xmm0,%xmm1
- vpaddd %xmm4,%xmm0,%xmm0
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vpxor %xmm7,%xmm5,%xmm5
- addl 20(%esp),%ecx
- xorl %eax,%ebp
- movl %edx,%esi
- shldl $5,%edx,%edx
- vpsrld $30,%xmm5,%xmm7
- vmovdqa %xmm0,(%esp)
- addl %ebp,%ecx
- xorl %eax,%esi
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vpslld $2,%xmm5,%xmm5
- addl 24(%esp),%ebx
- xorl %edi,%esi
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edi,%ebp
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpor %xmm7,%xmm5,%xmm5
- addl 28(%esp),%eax
- vmovdqa 80(%esp),%xmm7
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- xorl %edx,%ebp
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- vpalignr $8,%xmm4,%xmm5,%xmm0
- vpxor %xmm2,%xmm6,%xmm6
- addl 32(%esp),%edi
- andl %ecx,%esi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- vpxor %xmm7,%xmm6,%xmm6
- vmovdqa %xmm2,80(%esp)
- movl %eax,%ebp
- xorl %ecx,%esi
- vmovdqa %xmm1,%xmm2
- vpaddd %xmm5,%xmm1,%xmm1
- shldl $5,%eax,%eax
- addl %esi,%edi
- vpxor %xmm0,%xmm6,%xmm6
- xorl %ebx,%ebp
- xorl %ecx,%ebx
- addl %eax,%edi
- addl 36(%esp),%edx
- vpsrld $30,%xmm6,%xmm0
- vmovdqa %xmm1,16(%esp)
- andl %ebx,%ebp
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %edi,%esi
- vpslld $2,%xmm6,%xmm6
- xorl %ebx,%ebp
- shldl $5,%edi,%edi
- addl %ebp,%edx
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %edi,%edx
- addl 40(%esp),%ecx
- andl %eax,%esi
- vpor %xmm0,%xmm6,%xmm6
- xorl %ebx,%eax
- shrdl $7,%edi,%edi
- vmovdqa 96(%esp),%xmm0
- movl %edx,%ebp
- xorl %eax,%esi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %edi,%ebp
- xorl %eax,%edi
- addl %edx,%ecx
- addl 44(%esp),%ebx
- andl %edi,%ebp
- xorl %eax,%edi
- shrdl $7,%edx,%edx
- movl %ecx,%esi
- xorl %edi,%ebp
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edx,%esi
- xorl %edi,%edx
- addl %ecx,%ebx
- vpalignr $8,%xmm5,%xmm6,%xmm1
- vpxor %xmm3,%xmm7,%xmm7
- addl 48(%esp),%eax
- andl %edx,%esi
- xorl %edi,%edx
- shrdl $7,%ecx,%ecx
- vpxor %xmm0,%xmm7,%xmm7
- vmovdqa %xmm3,96(%esp)
- movl %ebx,%ebp
- xorl %edx,%esi
- vmovdqa 144(%esp),%xmm3
- vpaddd %xmm6,%xmm2,%xmm2
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- vpxor %xmm1,%xmm7,%xmm7
- xorl %ecx,%ebp
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 52(%esp),%edi
- vpsrld $30,%xmm7,%xmm1
- vmovdqa %xmm2,32(%esp)
- andl %ecx,%ebp
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%esi
- vpslld $2,%xmm7,%xmm7
- xorl %ecx,%ebp
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%edi
- addl 56(%esp),%edx
- andl %ebx,%esi
- vpor %xmm1,%xmm7,%xmm7
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- vmovdqa 64(%esp),%xmm1
- movl %edi,%ebp
- xorl %ebx,%esi
- shldl $5,%edi,%edi
- addl %esi,%edx
- xorl %eax,%ebp
- xorl %ebx,%eax
- addl %edi,%edx
- addl 60(%esp),%ecx
- andl %eax,%ebp
- xorl %ebx,%eax
- shrdl $7,%edi,%edi
- movl %edx,%esi
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %edi,%esi
- xorl %eax,%edi
- addl %edx,%ecx
- vpalignr $8,%xmm6,%xmm7,%xmm2
- vpxor %xmm4,%xmm0,%xmm0
- addl (%esp),%ebx
- andl %edi,%esi
- xorl %eax,%edi
- shrdl $7,%edx,%edx
- vpxor %xmm1,%xmm0,%xmm0
- vmovdqa %xmm4,64(%esp)
- movl %ecx,%ebp
- xorl %edi,%esi
- vmovdqa %xmm3,%xmm4
- vpaddd %xmm7,%xmm3,%xmm3
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- vpxor %xmm2,%xmm0,%xmm0
- xorl %edx,%ebp
- xorl %edi,%edx
- addl %ecx,%ebx
- addl 4(%esp),%eax
- vpsrld $30,%xmm0,%xmm2
- vmovdqa %xmm3,48(%esp)
- andl %edx,%ebp
- xorl %edi,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- vpslld $2,%xmm0,%xmm0
- xorl %edx,%ebp
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 8(%esp),%edi
- andl %ecx,%esi
- vpor %xmm2,%xmm0,%xmm0
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- vmovdqa 80(%esp),%xmm2
- movl %eax,%ebp
- xorl %ecx,%esi
- shldl $5,%eax,%eax
- addl %esi,%edi
- xorl %ebx,%ebp
- xorl %ecx,%ebx
- addl %eax,%edi
- addl 12(%esp),%edx
- andl %ebx,%ebp
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %edi,%esi
- xorl %ebx,%ebp
- shldl $5,%edi,%edi
- addl %ebp,%edx
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %edi,%edx
- vpalignr $8,%xmm7,%xmm0,%xmm3
- vpxor %xmm5,%xmm1,%xmm1
- addl 16(%esp),%ecx
- andl %eax,%esi
- xorl %ebx,%eax
- shrdl $7,%edi,%edi
- vpxor %xmm2,%xmm1,%xmm1
- vmovdqa %xmm5,80(%esp)
- movl %edx,%ebp
- xorl %eax,%esi
- vmovdqa %xmm4,%xmm5
- vpaddd %xmm0,%xmm4,%xmm4
- shldl $5,%edx,%edx
- addl %esi,%ecx
- vpxor %xmm3,%xmm1,%xmm1
- xorl %edi,%ebp
- xorl %eax,%edi
- addl %edx,%ecx
- addl 20(%esp),%ebx
- vpsrld $30,%xmm1,%xmm3
- vmovdqa %xmm4,(%esp)
- andl %edi,%ebp
- xorl %eax,%edi
- shrdl $7,%edx,%edx
- movl %ecx,%esi
- vpslld $2,%xmm1,%xmm1
- xorl %edi,%ebp
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edx,%esi
- xorl %edi,%edx
- addl %ecx,%ebx
- addl 24(%esp),%eax
- andl %edx,%esi
- vpor %xmm3,%xmm1,%xmm1
- xorl %edi,%edx
- shrdl $7,%ecx,%ecx
- vmovdqa 96(%esp),%xmm3
- movl %ebx,%ebp
- xorl %edx,%esi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %ecx,%ebp
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 28(%esp),%edi
- andl %ecx,%ebp
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%esi
- xorl %ecx,%ebp
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%edi
- vpalignr $8,%xmm0,%xmm1,%xmm4
- vpxor %xmm6,%xmm2,%xmm2
- addl 32(%esp),%edx
- andl %ebx,%esi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- vpxor %xmm3,%xmm2,%xmm2
- vmovdqa %xmm6,96(%esp)
- movl %edi,%ebp
- xorl %ebx,%esi
- vmovdqa %xmm5,%xmm6
- vpaddd %xmm1,%xmm5,%xmm5
- shldl $5,%edi,%edi
- addl %esi,%edx
- vpxor %xmm4,%xmm2,%xmm2
- xorl %eax,%ebp
- xorl %ebx,%eax
- addl %edi,%edx
- addl 36(%esp),%ecx
- vpsrld $30,%xmm2,%xmm4
- vmovdqa %xmm5,16(%esp)
- andl %eax,%ebp
- xorl %ebx,%eax
- shrdl $7,%edi,%edi
- movl %edx,%esi
- vpslld $2,%xmm2,%xmm2
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %edi,%esi
- xorl %eax,%edi
- addl %edx,%ecx
- addl 40(%esp),%ebx
- andl %edi,%esi
- vpor %xmm4,%xmm2,%xmm2
- xorl %eax,%edi
- shrdl $7,%edx,%edx
- vmovdqa 64(%esp),%xmm4
- movl %ecx,%ebp
- xorl %edi,%esi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edx,%ebp
- xorl %edi,%edx
- addl %ecx,%ebx
- addl 44(%esp),%eax
- andl %edx,%ebp
- xorl %edi,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- xorl %edx,%ebp
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %edx,%esi
- addl %ebx,%eax
- vpalignr $8,%xmm1,%xmm2,%xmm5
- vpxor %xmm7,%xmm3,%xmm3
- addl 48(%esp),%edi
- xorl %ecx,%esi
- movl %eax,%ebp
- shldl $5,%eax,%eax
- vpxor %xmm4,%xmm3,%xmm3
- vmovdqa %xmm7,64(%esp)
- addl %esi,%edi
- xorl %ecx,%ebp
- vmovdqa %xmm6,%xmm7
- vpaddd %xmm2,%xmm6,%xmm6
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- vpxor %xmm5,%xmm3,%xmm3
- addl 52(%esp),%edx
- xorl %ebx,%ebp
- movl %edi,%esi
- shldl $5,%edi,%edi
- vpsrld $30,%xmm3,%xmm5
- vmovdqa %xmm6,32(%esp)
- addl %ebp,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vpslld $2,%xmm3,%xmm3
- addl 56(%esp),%ecx
- xorl %eax,%esi
- movl %edx,%ebp
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%ebp
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vpor %xmm5,%xmm3,%xmm3
- addl 60(%esp),%ebx
- xorl %edi,%ebp
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edi,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl (%esp),%eax
- vpaddd %xmm3,%xmm7,%xmm7
- xorl %edx,%esi
- movl %ebx,%ebp
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- vmovdqa %xmm7,48(%esp)
- xorl %edx,%ebp
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 4(%esp),%edi
- xorl %ecx,%ebp
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- addl 8(%esp),%edx
- xorl %ebx,%esi
- movl %edi,%ebp
- shldl $5,%edi,%edi
- addl %esi,%edx
- xorl %ebx,%ebp
- shrdl $7,%eax,%eax
- addl %edi,%edx
- addl 12(%esp),%ecx
- xorl %eax,%ebp
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %eax,%esi
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- movl 196(%esp),%ebp
- cmpl 200(%esp),%ebp
- je .L010done
- vmovdqa 160(%esp),%xmm7
- vmovdqa 176(%esp),%xmm6
- vmovdqu (%ebp),%xmm0
- vmovdqu 16(%ebp),%xmm1
- vmovdqu 32(%ebp),%xmm2
- vmovdqu 48(%ebp),%xmm3
- addl $64,%ebp
- vpshufb %xmm6,%xmm0,%xmm0
- movl %ebp,196(%esp)
- vmovdqa %xmm7,96(%esp)
- addl 16(%esp),%ebx
- xorl %edi,%esi
- vpshufb %xmm6,%xmm1,%xmm1
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- vpaddd %xmm7,%xmm0,%xmm4
- addl %esi,%ebx
- xorl %edi,%ebp
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vmovdqa %xmm4,(%esp)
- addl 20(%esp),%eax
- xorl %edx,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 24(%esp),%edi
- xorl %ecx,%esi
- movl %eax,%ebp
- shldl $5,%eax,%eax
- addl %esi,%edi
- xorl %ecx,%ebp
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- addl 28(%esp),%edx
- xorl %ebx,%ebp
- movl %edi,%esi
- shldl $5,%edi,%edi
- addl %ebp,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %edi,%edx
- addl 32(%esp),%ecx
- xorl %eax,%esi
- vpshufb %xmm6,%xmm2,%xmm2
- movl %edx,%ebp
- shldl $5,%edx,%edx
- vpaddd %xmm7,%xmm1,%xmm5
- addl %esi,%ecx
- xorl %eax,%ebp
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vmovdqa %xmm5,16(%esp)
- addl 36(%esp),%ebx
- xorl %edi,%ebp
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edi,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 40(%esp),%eax
- xorl %edx,%esi
- movl %ebx,%ebp
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%ebp
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 44(%esp),%edi
- xorl %ecx,%ebp
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- addl 48(%esp),%edx
- xorl %ebx,%esi
- vpshufb %xmm6,%xmm3,%xmm3
- movl %edi,%ebp
- shldl $5,%edi,%edi
- vpaddd %xmm7,%xmm2,%xmm6
- addl %esi,%edx
- xorl %ebx,%ebp
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vmovdqa %xmm6,32(%esp)
- addl 52(%esp),%ecx
- xorl %eax,%ebp
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %eax,%esi
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- addl 56(%esp),%ebx
- xorl %edi,%esi
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edi,%ebp
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 60(%esp),%eax
- xorl %edx,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- movl 192(%esp),%ebp
- addl (%ebp),%eax
- addl 4(%ebp),%esi
- addl 8(%ebp),%ecx
- movl %eax,(%ebp)
- addl 12(%ebp),%edx
- movl %esi,4(%ebp)
- addl 16(%ebp),%edi
- movl %ecx,%ebx
- movl %ecx,8(%ebp)
- xorl %edx,%ebx
- movl %edx,12(%ebp)
- movl %edi,16(%ebp)
- movl %esi,%ebp
- andl %ebx,%esi
- movl %ebp,%ebx
- jmp .L009loop
-.align 16
-.L010done:
- addl 16(%esp),%ebx
- xorl %edi,%esi
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edi,%ebp
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 20(%esp),%eax
- xorl %edx,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 24(%esp),%edi
- xorl %ecx,%esi
- movl %eax,%ebp
- shldl $5,%eax,%eax
- addl %esi,%edi
- xorl %ecx,%ebp
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- addl 28(%esp),%edx
- xorl %ebx,%ebp
- movl %edi,%esi
- shldl $5,%edi,%edi
- addl %ebp,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %edi,%edx
- addl 32(%esp),%ecx
- xorl %eax,%esi
- movl %edx,%ebp
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%ebp
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- addl 36(%esp),%ebx
- xorl %edi,%ebp
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edi,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 40(%esp),%eax
- xorl %edx,%esi
- movl %ebx,%ebp
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%ebp
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 44(%esp),%edi
- xorl %ecx,%ebp
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- addl 48(%esp),%edx
- xorl %ebx,%esi
- movl %edi,%ebp
- shldl $5,%edi,%edi
- addl %esi,%edx
- xorl %ebx,%ebp
- shrdl $7,%eax,%eax
- addl %edi,%edx
- addl 52(%esp),%ecx
- xorl %eax,%ebp
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %eax,%esi
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- addl 56(%esp),%ebx
- xorl %edi,%esi
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edi,%ebp
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 60(%esp),%eax
- xorl %edx,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vzeroall
- movl 192(%esp),%ebp
- addl (%ebp),%eax
- movl 204(%esp),%esp
- addl 4(%ebp),%esi
- addl 8(%ebp),%ecx
- movl %eax,(%ebp)
- addl 12(%ebp),%edx
- movl %esi,4(%ebp)
- addl 16(%ebp),%edi
- movl %ecx,8(%ebp)
- movl %edx,12(%ebp)
- movl %edi,16(%ebp)
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.size _sha1_block_data_order_avx,.-_sha1_block_data_order_avx
.align 64
.LK_XX_XX:
.long 1518500249,1518500249,1518500249,1518500249
@@ -3995,11 +2820,6 @@ sha1_block_data_order:
jz .L001x86
testl $536870912,%ecx
jnz .Lshaext_shortcut
- andl $268435456,%edx
- andl $1073741824,%eax
- orl %edx,%eax
- cmpl $1342177280,%eax
- je .Lavx_shortcut
jmp .Lssse3_shortcut
.align 16
.L001x86:
@@ -6757,1176 +5577,6 @@ _sha1_block_data_order_ssse3:
popl %ebp
ret
.size _sha1_block_data_order_ssse3,.-_sha1_block_data_order_ssse3
-.type _sha1_block_data_order_avx,@function
-.align 16
-_sha1_block_data_order_avx:
- pushl %ebp
- pushl %ebx
- pushl %esi
- pushl %edi
- call .L008pic_point
-.L008pic_point:
- popl %ebp
- leal .LK_XX_XX-.L008pic_point(%ebp),%ebp
-.Lavx_shortcut:
- vzeroall
- vmovdqa (%ebp),%xmm7
- vmovdqa 16(%ebp),%xmm0
- vmovdqa 32(%ebp),%xmm1
- vmovdqa 48(%ebp),%xmm2
- vmovdqa 64(%ebp),%xmm6
- movl 20(%esp),%edi
- movl 24(%esp),%ebp
- movl 28(%esp),%edx
- movl %esp,%esi
- subl $208,%esp
- andl $-64,%esp
- vmovdqa %xmm0,112(%esp)
- vmovdqa %xmm1,128(%esp)
- vmovdqa %xmm2,144(%esp)
- shll $6,%edx
- vmovdqa %xmm7,160(%esp)
- addl %ebp,%edx
- vmovdqa %xmm6,176(%esp)
- addl $64,%ebp
- movl %edi,192(%esp)
- movl %ebp,196(%esp)
- movl %edx,200(%esp)
- movl %esi,204(%esp)
- movl (%edi),%eax
- movl 4(%edi),%ebx
- movl 8(%edi),%ecx
- movl 12(%edi),%edx
- movl 16(%edi),%edi
- movl %ebx,%esi
- vmovdqu -64(%ebp),%xmm0
- vmovdqu -48(%ebp),%xmm1
- vmovdqu -32(%ebp),%xmm2
- vmovdqu -16(%ebp),%xmm3
- vpshufb %xmm6,%xmm0,%xmm0
- vpshufb %xmm6,%xmm1,%xmm1
- vpshufb %xmm6,%xmm2,%xmm2
- vmovdqa %xmm7,96(%esp)
- vpshufb %xmm6,%xmm3,%xmm3
- vpaddd %xmm7,%xmm0,%xmm4
- vpaddd %xmm7,%xmm1,%xmm5
- vpaddd %xmm7,%xmm2,%xmm6
- vmovdqa %xmm4,(%esp)
- movl %ecx,%ebp
- vmovdqa %xmm5,16(%esp)
- xorl %edx,%ebp
- vmovdqa %xmm6,32(%esp)
- andl %ebp,%esi
- jmp .L009loop
-.align 16
-.L009loop:
- shrdl $2,%ebx,%ebx
- xorl %edx,%esi
- vpalignr $8,%xmm0,%xmm1,%xmm4
- movl %eax,%ebp
- addl (%esp),%edi
- vpaddd %xmm3,%xmm7,%xmm7
- vmovdqa %xmm0,64(%esp)
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpsrldq $4,%xmm3,%xmm6
- addl %esi,%edi
- andl %ebx,%ebp
- vpxor %xmm0,%xmm4,%xmm4
- xorl %ecx,%ebx
- addl %eax,%edi
- vpxor %xmm2,%xmm6,%xmm6
- shrdl $7,%eax,%eax
- xorl %ecx,%ebp
- vmovdqa %xmm7,48(%esp)
- movl %edi,%esi
- addl 4(%esp),%edx
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ebx,%eax
- shldl $5,%edi,%edi
- addl %ebp,%edx
- andl %eax,%esi
- vpsrld $31,%xmm4,%xmm6
- xorl %ebx,%eax
- addl %edi,%edx
- shrdl $7,%edi,%edi
- xorl %ebx,%esi
- vpslldq $12,%xmm4,%xmm0
- vpaddd %xmm4,%xmm4,%xmm4
- movl %edx,%ebp
- addl 8(%esp),%ecx
- xorl %eax,%edi
- shldl $5,%edx,%edx
- vpsrld $30,%xmm0,%xmm7
- vpor %xmm6,%xmm4,%xmm4
- addl %esi,%ecx
- andl %edi,%ebp
- xorl %eax,%edi
- addl %edx,%ecx
- vpslld $2,%xmm0,%xmm0
- shrdl $7,%edx,%edx
- xorl %eax,%ebp
- vpxor %xmm7,%xmm4,%xmm4
- movl %ecx,%esi
- addl 12(%esp),%ebx
- xorl %edi,%edx
- shldl $5,%ecx,%ecx
- vpxor %xmm0,%xmm4,%xmm4
- addl %ebp,%ebx
- andl %edx,%esi
- vmovdqa 96(%esp),%xmm0
- xorl %edi,%edx
- addl %ecx,%ebx
- shrdl $7,%ecx,%ecx
- xorl %edi,%esi
- vpalignr $8,%xmm1,%xmm2,%xmm5
- movl %ebx,%ebp
- addl 16(%esp),%eax
- vpaddd %xmm4,%xmm0,%xmm0
- vmovdqa %xmm1,80(%esp)
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vpsrldq $4,%xmm4,%xmm7
- addl %esi,%eax
- andl %ecx,%ebp
- vpxor %xmm1,%xmm5,%xmm5
- xorl %edx,%ecx
- addl %ebx,%eax
- vpxor %xmm3,%xmm7,%xmm7
- shrdl $7,%ebx,%ebx
- xorl %edx,%ebp
- vmovdqa %xmm0,(%esp)
- movl %eax,%esi
- addl 20(%esp),%edi
- vpxor %xmm7,%xmm5,%xmm5
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- addl %ebp,%edi
- andl %ebx,%esi
- vpsrld $31,%xmm5,%xmm7
- xorl %ecx,%ebx
- addl %eax,%edi
- shrdl $7,%eax,%eax
- xorl %ecx,%esi
- vpslldq $12,%xmm5,%xmm1
- vpaddd %xmm5,%xmm5,%xmm5
- movl %edi,%ebp
- addl 24(%esp),%edx
- xorl %ebx,%eax
- shldl $5,%edi,%edi
- vpsrld $30,%xmm1,%xmm0
- vpor %xmm7,%xmm5,%xmm5
- addl %esi,%edx
- andl %eax,%ebp
- xorl %ebx,%eax
- addl %edi,%edx
- vpslld $2,%xmm1,%xmm1
- shrdl $7,%edi,%edi
- xorl %ebx,%ebp
- vpxor %xmm0,%xmm5,%xmm5
- movl %edx,%esi
- addl 28(%esp),%ecx
- xorl %eax,%edi
- shldl $5,%edx,%edx
- vpxor %xmm1,%xmm5,%xmm5
- addl %ebp,%ecx
- andl %edi,%esi
- vmovdqa 112(%esp),%xmm1
- xorl %eax,%edi
- addl %edx,%ecx
- shrdl $7,%edx,%edx
- xorl %eax,%esi
- vpalignr $8,%xmm2,%xmm3,%xmm6
- movl %ecx,%ebp
- addl 32(%esp),%ebx
- vpaddd %xmm5,%xmm1,%xmm1
- vmovdqa %xmm2,96(%esp)
- xorl %edi,%edx
- shldl $5,%ecx,%ecx
- vpsrldq $4,%xmm5,%xmm0
- addl %esi,%ebx
- andl %edx,%ebp
- vpxor %xmm2,%xmm6,%xmm6
- xorl %edi,%edx
- addl %ecx,%ebx
- vpxor %xmm4,%xmm0,%xmm0
- shrdl $7,%ecx,%ecx
- xorl %edi,%ebp
- vmovdqa %xmm1,16(%esp)
- movl %ebx,%esi
- addl 36(%esp),%eax
- vpxor %xmm0,%xmm6,%xmm6
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- andl %ecx,%esi
- vpsrld $31,%xmm6,%xmm0
- xorl %edx,%ecx
- addl %ebx,%eax
- shrdl $7,%ebx,%ebx
- xorl %edx,%esi
- vpslldq $12,%xmm6,%xmm2
- vpaddd %xmm6,%xmm6,%xmm6
- movl %eax,%ebp
- addl 40(%esp),%edi
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpsrld $30,%xmm2,%xmm1
- vpor %xmm0,%xmm6,%xmm6
- addl %esi,%edi
- andl %ebx,%ebp
- xorl %ecx,%ebx
- addl %eax,%edi
- vpslld $2,%xmm2,%xmm2
- vmovdqa 64(%esp),%xmm0
- shrdl $7,%eax,%eax
- xorl %ecx,%ebp
- vpxor %xmm1,%xmm6,%xmm6
- movl %edi,%esi
- addl 44(%esp),%edx
- xorl %ebx,%eax
- shldl $5,%edi,%edi
- vpxor %xmm2,%xmm6,%xmm6
- addl %ebp,%edx
- andl %eax,%esi
- vmovdqa 112(%esp),%xmm2
- xorl %ebx,%eax
- addl %edi,%edx
- shrdl $7,%edi,%edi
- xorl %ebx,%esi
- vpalignr $8,%xmm3,%xmm4,%xmm7
- movl %edx,%ebp
- addl 48(%esp),%ecx
- vpaddd %xmm6,%xmm2,%xmm2
- vmovdqa %xmm3,64(%esp)
- xorl %eax,%edi
- shldl $5,%edx,%edx
- vpsrldq $4,%xmm6,%xmm1
- addl %esi,%ecx
- andl %edi,%ebp
- vpxor %xmm3,%xmm7,%xmm7
- xorl %eax,%edi
- addl %edx,%ecx
- vpxor %xmm5,%xmm1,%xmm1
- shrdl $7,%edx,%edx
- xorl %eax,%ebp
- vmovdqa %xmm2,32(%esp)
- movl %ecx,%esi
- addl 52(%esp),%ebx
- vpxor %xmm1,%xmm7,%xmm7
- xorl %edi,%edx
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- andl %edx,%esi
- vpsrld $31,%xmm7,%xmm1
- xorl %edi,%edx
- addl %ecx,%ebx
- shrdl $7,%ecx,%ecx
- xorl %edi,%esi
- vpslldq $12,%xmm7,%xmm3
- vpaddd %xmm7,%xmm7,%xmm7
- movl %ebx,%ebp
- addl 56(%esp),%eax
- xorl %edx,%ecx
- shldl $5,%ebx,%ebx
- vpsrld $30,%xmm3,%xmm2
- vpor %xmm1,%xmm7,%xmm7
- addl %esi,%eax
- andl %ecx,%ebp
- xorl %edx,%ecx
- addl %ebx,%eax
- vpslld $2,%xmm3,%xmm3
- vmovdqa 80(%esp),%xmm1
- shrdl $7,%ebx,%ebx
- xorl %edx,%ebp
- vpxor %xmm2,%xmm7,%xmm7
- movl %eax,%esi
- addl 60(%esp),%edi
- xorl %ecx,%ebx
- shldl $5,%eax,%eax
- vpxor %xmm3,%xmm7,%xmm7
- addl %ebp,%edi
- andl %ebx,%esi
- vmovdqa 112(%esp),%xmm3
- xorl %ecx,%ebx
- addl %eax,%edi
- vpalignr $8,%xmm6,%xmm7,%xmm2
- vpxor %xmm4,%xmm0,%xmm0
- shrdl $7,%eax,%eax
- xorl %ecx,%esi
- movl %edi,%ebp
- addl (%esp),%edx
- vpxor %xmm1,%xmm0,%xmm0
- vmovdqa %xmm4,80(%esp)
- xorl %ebx,%eax
- shldl $5,%edi,%edi
- vmovdqa %xmm3,%xmm4
- vpaddd %xmm7,%xmm3,%xmm3
- addl %esi,%edx
- andl %eax,%ebp
- vpxor %xmm2,%xmm0,%xmm0
- xorl %ebx,%eax
- addl %edi,%edx
- shrdl $7,%edi,%edi
- xorl %ebx,%ebp
- vpsrld $30,%xmm0,%xmm2
- vmovdqa %xmm3,48(%esp)
- movl %edx,%esi
- addl 4(%esp),%ecx
- xorl %eax,%edi
- shldl $5,%edx,%edx
- vpslld $2,%xmm0,%xmm0
- addl %ebp,%ecx
- andl %edi,%esi
- xorl %eax,%edi
- addl %edx,%ecx
- shrdl $7,%edx,%edx
- xorl %eax,%esi
- movl %ecx,%ebp
- addl 8(%esp),%ebx
- vpor %xmm2,%xmm0,%xmm0
- xorl %edi,%edx
- shldl $5,%ecx,%ecx
- vmovdqa 96(%esp),%xmm2
- addl %esi,%ebx
- andl %edx,%ebp
- xorl %edi,%edx
- addl %ecx,%ebx
- addl 12(%esp),%eax
- xorl %edi,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpalignr $8,%xmm7,%xmm0,%xmm3
- vpxor %xmm5,%xmm1,%xmm1
- addl 16(%esp),%edi
- xorl %ecx,%esi
- movl %eax,%ebp
- shldl $5,%eax,%eax
- vpxor %xmm2,%xmm1,%xmm1
- vmovdqa %xmm5,96(%esp)
- addl %esi,%edi
- xorl %ecx,%ebp
- vmovdqa %xmm4,%xmm5
- vpaddd %xmm0,%xmm4,%xmm4
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- vpxor %xmm3,%xmm1,%xmm1
- addl 20(%esp),%edx
- xorl %ebx,%ebp
- movl %edi,%esi
- shldl $5,%edi,%edi
- vpsrld $30,%xmm1,%xmm3
- vmovdqa %xmm4,(%esp)
- addl %ebp,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vpslld $2,%xmm1,%xmm1
- addl 24(%esp),%ecx
- xorl %eax,%esi
- movl %edx,%ebp
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%ebp
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vpor %xmm3,%xmm1,%xmm1
- addl 28(%esp),%ebx
- xorl %edi,%ebp
- vmovdqa 64(%esp),%xmm3
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edi,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpalignr $8,%xmm0,%xmm1,%xmm4
- vpxor %xmm6,%xmm2,%xmm2
- addl 32(%esp),%eax
- xorl %edx,%esi
- movl %ebx,%ebp
- shldl $5,%ebx,%ebx
- vpxor %xmm3,%xmm2,%xmm2
- vmovdqa %xmm6,64(%esp)
- addl %esi,%eax
- xorl %edx,%ebp
- vmovdqa 128(%esp),%xmm6
- vpaddd %xmm1,%xmm5,%xmm5
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpxor %xmm4,%xmm2,%xmm2
- addl 36(%esp),%edi
- xorl %ecx,%ebp
- movl %eax,%esi
- shldl $5,%eax,%eax
- vpsrld $30,%xmm2,%xmm4
- vmovdqa %xmm5,16(%esp)
- addl %ebp,%edi
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- vpslld $2,%xmm2,%xmm2
- addl 40(%esp),%edx
- xorl %ebx,%esi
- movl %edi,%ebp
- shldl $5,%edi,%edi
- addl %esi,%edx
- xorl %ebx,%ebp
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vpor %xmm4,%xmm2,%xmm2
- addl 44(%esp),%ecx
- xorl %eax,%ebp
- vmovdqa 80(%esp),%xmm4
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %eax,%esi
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vpalignr $8,%xmm1,%xmm2,%xmm5
- vpxor %xmm7,%xmm3,%xmm3
- addl 48(%esp),%ebx
- xorl %edi,%esi
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- vpxor %xmm4,%xmm3,%xmm3
- vmovdqa %xmm7,80(%esp)
- addl %esi,%ebx
- xorl %edi,%ebp
- vmovdqa %xmm6,%xmm7
- vpaddd %xmm2,%xmm6,%xmm6
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpxor %xmm5,%xmm3,%xmm3
- addl 52(%esp),%eax
- xorl %edx,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- vpsrld $30,%xmm3,%xmm5
- vmovdqa %xmm6,32(%esp)
- addl %ebp,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpslld $2,%xmm3,%xmm3
- addl 56(%esp),%edi
- xorl %ecx,%esi
- movl %eax,%ebp
- shldl $5,%eax,%eax
- addl %esi,%edi
- xorl %ecx,%ebp
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- vpor %xmm5,%xmm3,%xmm3
- addl 60(%esp),%edx
- xorl %ebx,%ebp
- vmovdqa 96(%esp),%xmm5
- movl %edi,%esi
- shldl $5,%edi,%edi
- addl %ebp,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vpalignr $8,%xmm2,%xmm3,%xmm6
- vpxor %xmm0,%xmm4,%xmm4
- addl (%esp),%ecx
- xorl %eax,%esi
- movl %edx,%ebp
- shldl $5,%edx,%edx
- vpxor %xmm5,%xmm4,%xmm4
- vmovdqa %xmm0,96(%esp)
- addl %esi,%ecx
- xorl %eax,%ebp
- vmovdqa %xmm7,%xmm0
- vpaddd %xmm3,%xmm7,%xmm7
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vpxor %xmm6,%xmm4,%xmm4
- addl 4(%esp),%ebx
- xorl %edi,%ebp
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- vpsrld $30,%xmm4,%xmm6
- vmovdqa %xmm7,48(%esp)
- addl %ebp,%ebx
- xorl %edi,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpslld $2,%xmm4,%xmm4
- addl 8(%esp),%eax
- xorl %edx,%esi
- movl %ebx,%ebp
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%ebp
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vpor %xmm6,%xmm4,%xmm4
- addl 12(%esp),%edi
- xorl %ecx,%ebp
- vmovdqa 64(%esp),%xmm6
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- vpalignr $8,%xmm3,%xmm4,%xmm7
- vpxor %xmm1,%xmm5,%xmm5
- addl 16(%esp),%edx
- xorl %ebx,%esi
- movl %edi,%ebp
- shldl $5,%edi,%edi
- vpxor %xmm6,%xmm5,%xmm5
- vmovdqa %xmm1,64(%esp)
- addl %esi,%edx
- xorl %ebx,%ebp
- vmovdqa %xmm0,%xmm1
- vpaddd %xmm4,%xmm0,%xmm0
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vpxor %xmm7,%xmm5,%xmm5
- addl 20(%esp),%ecx
- xorl %eax,%ebp
- movl %edx,%esi
- shldl $5,%edx,%edx
- vpsrld $30,%xmm5,%xmm7
- vmovdqa %xmm0,(%esp)
- addl %ebp,%ecx
- xorl %eax,%esi
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vpslld $2,%xmm5,%xmm5
- addl 24(%esp),%ebx
- xorl %edi,%esi
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edi,%ebp
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vpor %xmm7,%xmm5,%xmm5
- addl 28(%esp),%eax
- vmovdqa 80(%esp),%xmm7
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- xorl %edx,%ebp
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- vpalignr $8,%xmm4,%xmm5,%xmm0
- vpxor %xmm2,%xmm6,%xmm6
- addl 32(%esp),%edi
- andl %ecx,%esi
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- vpxor %xmm7,%xmm6,%xmm6
- vmovdqa %xmm2,80(%esp)
- movl %eax,%ebp
- xorl %ecx,%esi
- vmovdqa %xmm1,%xmm2
- vpaddd %xmm5,%xmm1,%xmm1
- shldl $5,%eax,%eax
- addl %esi,%edi
- vpxor %xmm0,%xmm6,%xmm6
- xorl %ebx,%ebp
- xorl %ecx,%ebx
- addl %eax,%edi
- addl 36(%esp),%edx
- vpsrld $30,%xmm6,%xmm0
- vmovdqa %xmm1,16(%esp)
- andl %ebx,%ebp
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %edi,%esi
- vpslld $2,%xmm6,%xmm6
- xorl %ebx,%ebp
- shldl $5,%edi,%edi
- addl %ebp,%edx
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %edi,%edx
- addl 40(%esp),%ecx
- andl %eax,%esi
- vpor %xmm0,%xmm6,%xmm6
- xorl %ebx,%eax
- shrdl $7,%edi,%edi
- vmovdqa 96(%esp),%xmm0
- movl %edx,%ebp
- xorl %eax,%esi
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %edi,%ebp
- xorl %eax,%edi
- addl %edx,%ecx
- addl 44(%esp),%ebx
- andl %edi,%ebp
- xorl %eax,%edi
- shrdl $7,%edx,%edx
- movl %ecx,%esi
- xorl %edi,%ebp
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edx,%esi
- xorl %edi,%edx
- addl %ecx,%ebx
- vpalignr $8,%xmm5,%xmm6,%xmm1
- vpxor %xmm3,%xmm7,%xmm7
- addl 48(%esp),%eax
- andl %edx,%esi
- xorl %edi,%edx
- shrdl $7,%ecx,%ecx
- vpxor %xmm0,%xmm7,%xmm7
- vmovdqa %xmm3,96(%esp)
- movl %ebx,%ebp
- xorl %edx,%esi
- vmovdqa 144(%esp),%xmm3
- vpaddd %xmm6,%xmm2,%xmm2
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- vpxor %xmm1,%xmm7,%xmm7
- xorl %ecx,%ebp
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 52(%esp),%edi
- vpsrld $30,%xmm7,%xmm1
- vmovdqa %xmm2,32(%esp)
- andl %ecx,%ebp
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%esi
- vpslld $2,%xmm7,%xmm7
- xorl %ecx,%ebp
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%edi
- addl 56(%esp),%edx
- andl %ebx,%esi
- vpor %xmm1,%xmm7,%xmm7
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- vmovdqa 64(%esp),%xmm1
- movl %edi,%ebp
- xorl %ebx,%esi
- shldl $5,%edi,%edi
- addl %esi,%edx
- xorl %eax,%ebp
- xorl %ebx,%eax
- addl %edi,%edx
- addl 60(%esp),%ecx
- andl %eax,%ebp
- xorl %ebx,%eax
- shrdl $7,%edi,%edi
- movl %edx,%esi
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %edi,%esi
- xorl %eax,%edi
- addl %edx,%ecx
- vpalignr $8,%xmm6,%xmm7,%xmm2
- vpxor %xmm4,%xmm0,%xmm0
- addl (%esp),%ebx
- andl %edi,%esi
- xorl %eax,%edi
- shrdl $7,%edx,%edx
- vpxor %xmm1,%xmm0,%xmm0
- vmovdqa %xmm4,64(%esp)
- movl %ecx,%ebp
- xorl %edi,%esi
- vmovdqa %xmm3,%xmm4
- vpaddd %xmm7,%xmm3,%xmm3
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- vpxor %xmm2,%xmm0,%xmm0
- xorl %edx,%ebp
- xorl %edi,%edx
- addl %ecx,%ebx
- addl 4(%esp),%eax
- vpsrld $30,%xmm0,%xmm2
- vmovdqa %xmm3,48(%esp)
- andl %edx,%ebp
- xorl %edi,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- vpslld $2,%xmm0,%xmm0
- xorl %edx,%ebp
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %ecx,%esi
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 8(%esp),%edi
- andl %ecx,%esi
- vpor %xmm2,%xmm0,%xmm0
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- vmovdqa 80(%esp),%xmm2
- movl %eax,%ebp
- xorl %ecx,%esi
- shldl $5,%eax,%eax
- addl %esi,%edi
- xorl %ebx,%ebp
- xorl %ecx,%ebx
- addl %eax,%edi
- addl 12(%esp),%edx
- andl %ebx,%ebp
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- movl %edi,%esi
- xorl %ebx,%ebp
- shldl $5,%edi,%edi
- addl %ebp,%edx
- xorl %eax,%esi
- xorl %ebx,%eax
- addl %edi,%edx
- vpalignr $8,%xmm7,%xmm0,%xmm3
- vpxor %xmm5,%xmm1,%xmm1
- addl 16(%esp),%ecx
- andl %eax,%esi
- xorl %ebx,%eax
- shrdl $7,%edi,%edi
- vpxor %xmm2,%xmm1,%xmm1
- vmovdqa %xmm5,80(%esp)
- movl %edx,%ebp
- xorl %eax,%esi
- vmovdqa %xmm4,%xmm5
- vpaddd %xmm0,%xmm4,%xmm4
- shldl $5,%edx,%edx
- addl %esi,%ecx
- vpxor %xmm3,%xmm1,%xmm1
- xorl %edi,%ebp
- xorl %eax,%edi
- addl %edx,%ecx
- addl 20(%esp),%ebx
- vpsrld $30,%xmm1,%xmm3
- vmovdqa %xmm4,(%esp)
- andl %edi,%ebp
- xorl %eax,%edi
- shrdl $7,%edx,%edx
- movl %ecx,%esi
- vpslld $2,%xmm1,%xmm1
- xorl %edi,%ebp
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edx,%esi
- xorl %edi,%edx
- addl %ecx,%ebx
- addl 24(%esp),%eax
- andl %edx,%esi
- vpor %xmm3,%xmm1,%xmm1
- xorl %edi,%edx
- shrdl $7,%ecx,%ecx
- vmovdqa 96(%esp),%xmm3
- movl %ebx,%ebp
- xorl %edx,%esi
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %ecx,%ebp
- xorl %edx,%ecx
- addl %ebx,%eax
- addl 28(%esp),%edi
- andl %ecx,%ebp
- xorl %edx,%ecx
- shrdl $7,%ebx,%ebx
- movl %eax,%esi
- xorl %ecx,%ebp
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ebx,%esi
- xorl %ecx,%ebx
- addl %eax,%edi
- vpalignr $8,%xmm0,%xmm1,%xmm4
- vpxor %xmm6,%xmm2,%xmm2
- addl 32(%esp),%edx
- andl %ebx,%esi
- xorl %ecx,%ebx
- shrdl $7,%eax,%eax
- vpxor %xmm3,%xmm2,%xmm2
- vmovdqa %xmm6,96(%esp)
- movl %edi,%ebp
- xorl %ebx,%esi
- vmovdqa %xmm5,%xmm6
- vpaddd %xmm1,%xmm5,%xmm5
- shldl $5,%edi,%edi
- addl %esi,%edx
- vpxor %xmm4,%xmm2,%xmm2
- xorl %eax,%ebp
- xorl %ebx,%eax
- addl %edi,%edx
- addl 36(%esp),%ecx
- vpsrld $30,%xmm2,%xmm4
- vmovdqa %xmm5,16(%esp)
- andl %eax,%ebp
- xorl %ebx,%eax
- shrdl $7,%edi,%edi
- movl %edx,%esi
- vpslld $2,%xmm2,%xmm2
- xorl %eax,%ebp
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %edi,%esi
- xorl %eax,%edi
- addl %edx,%ecx
- addl 40(%esp),%ebx
- andl %edi,%esi
- vpor %xmm4,%xmm2,%xmm2
- xorl %eax,%edi
- shrdl $7,%edx,%edx
- vmovdqa 64(%esp),%xmm4
- movl %ecx,%ebp
- xorl %edi,%esi
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edx,%ebp
- xorl %edi,%edx
- addl %ecx,%ebx
- addl 44(%esp),%eax
- andl %edx,%ebp
- xorl %edi,%edx
- shrdl $7,%ecx,%ecx
- movl %ebx,%esi
- xorl %edx,%ebp
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %edx,%esi
- addl %ebx,%eax
- vpalignr $8,%xmm1,%xmm2,%xmm5
- vpxor %xmm7,%xmm3,%xmm3
- addl 48(%esp),%edi
- xorl %ecx,%esi
- movl %eax,%ebp
- shldl $5,%eax,%eax
- vpxor %xmm4,%xmm3,%xmm3
- vmovdqa %xmm7,64(%esp)
- addl %esi,%edi
- xorl %ecx,%ebp
- vmovdqa %xmm6,%xmm7
- vpaddd %xmm2,%xmm6,%xmm6
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- vpxor %xmm5,%xmm3,%xmm3
- addl 52(%esp),%edx
- xorl %ebx,%ebp
- movl %edi,%esi
- shldl $5,%edi,%edi
- vpsrld $30,%xmm3,%xmm5
- vmovdqa %xmm6,32(%esp)
- addl %ebp,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vpslld $2,%xmm3,%xmm3
- addl 56(%esp),%ecx
- xorl %eax,%esi
- movl %edx,%ebp
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%ebp
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vpor %xmm5,%xmm3,%xmm3
- addl 60(%esp),%ebx
- xorl %edi,%ebp
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edi,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl (%esp),%eax
- vpaddd %xmm3,%xmm7,%xmm7
- xorl %edx,%esi
- movl %ebx,%ebp
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- vmovdqa %xmm7,48(%esp)
- xorl %edx,%ebp
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 4(%esp),%edi
- xorl %ecx,%ebp
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- addl 8(%esp),%edx
- xorl %ebx,%esi
- movl %edi,%ebp
- shldl $5,%edi,%edi
- addl %esi,%edx
- xorl %ebx,%ebp
- shrdl $7,%eax,%eax
- addl %edi,%edx
- addl 12(%esp),%ecx
- xorl %eax,%ebp
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %eax,%esi
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- movl 196(%esp),%ebp
- cmpl 200(%esp),%ebp
- je .L010done
- vmovdqa 160(%esp),%xmm7
- vmovdqa 176(%esp),%xmm6
- vmovdqu (%ebp),%xmm0
- vmovdqu 16(%ebp),%xmm1
- vmovdqu 32(%ebp),%xmm2
- vmovdqu 48(%ebp),%xmm3
- addl $64,%ebp
- vpshufb %xmm6,%xmm0,%xmm0
- movl %ebp,196(%esp)
- vmovdqa %xmm7,96(%esp)
- addl 16(%esp),%ebx
- xorl %edi,%esi
- vpshufb %xmm6,%xmm1,%xmm1
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- vpaddd %xmm7,%xmm0,%xmm4
- addl %esi,%ebx
- xorl %edi,%ebp
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- vmovdqa %xmm4,(%esp)
- addl 20(%esp),%eax
- xorl %edx,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 24(%esp),%edi
- xorl %ecx,%esi
- movl %eax,%ebp
- shldl $5,%eax,%eax
- addl %esi,%edi
- xorl %ecx,%ebp
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- addl 28(%esp),%edx
- xorl %ebx,%ebp
- movl %edi,%esi
- shldl $5,%edi,%edi
- addl %ebp,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %edi,%edx
- addl 32(%esp),%ecx
- xorl %eax,%esi
- vpshufb %xmm6,%xmm2,%xmm2
- movl %edx,%ebp
- shldl $5,%edx,%edx
- vpaddd %xmm7,%xmm1,%xmm5
- addl %esi,%ecx
- xorl %eax,%ebp
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- vmovdqa %xmm5,16(%esp)
- addl 36(%esp),%ebx
- xorl %edi,%ebp
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edi,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 40(%esp),%eax
- xorl %edx,%esi
- movl %ebx,%ebp
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%ebp
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 44(%esp),%edi
- xorl %ecx,%ebp
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- addl 48(%esp),%edx
- xorl %ebx,%esi
- vpshufb %xmm6,%xmm3,%xmm3
- movl %edi,%ebp
- shldl $5,%edi,%edi
- vpaddd %xmm7,%xmm2,%xmm6
- addl %esi,%edx
- xorl %ebx,%ebp
- shrdl $7,%eax,%eax
- addl %edi,%edx
- vmovdqa %xmm6,32(%esp)
- addl 52(%esp),%ecx
- xorl %eax,%ebp
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %eax,%esi
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- addl 56(%esp),%ebx
- xorl %edi,%esi
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edi,%ebp
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 60(%esp),%eax
- xorl %edx,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- movl 192(%esp),%ebp
- addl (%ebp),%eax
- addl 4(%ebp),%esi
- addl 8(%ebp),%ecx
- movl %eax,(%ebp)
- addl 12(%ebp),%edx
- movl %esi,4(%ebp)
- addl 16(%ebp),%edi
- movl %ecx,%ebx
- movl %ecx,8(%ebp)
- xorl %edx,%ebx
- movl %edx,12(%ebp)
- movl %edi,16(%ebp)
- movl %esi,%ebp
- andl %ebx,%esi
- movl %ebp,%ebx
- jmp .L009loop
-.align 16
-.L010done:
- addl 16(%esp),%ebx
- xorl %edi,%esi
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edi,%ebp
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 20(%esp),%eax
- xorl %edx,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- xorl %edx,%esi
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 24(%esp),%edi
- xorl %ecx,%esi
- movl %eax,%ebp
- shldl $5,%eax,%eax
- addl %esi,%edi
- xorl %ecx,%ebp
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- addl 28(%esp),%edx
- xorl %ebx,%ebp
- movl %edi,%esi
- shldl $5,%edi,%edi
- addl %ebp,%edx
- xorl %ebx,%esi
- shrdl $7,%eax,%eax
- addl %edi,%edx
- addl 32(%esp),%ecx
- xorl %eax,%esi
- movl %edx,%ebp
- shldl $5,%edx,%edx
- addl %esi,%ecx
- xorl %eax,%ebp
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- addl 36(%esp),%ebx
- xorl %edi,%ebp
- movl %ecx,%esi
- shldl $5,%ecx,%ecx
- addl %ebp,%ebx
- xorl %edi,%esi
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 40(%esp),%eax
- xorl %edx,%esi
- movl %ebx,%ebp
- shldl $5,%ebx,%ebx
- addl %esi,%eax
- xorl %edx,%ebp
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- addl 44(%esp),%edi
- xorl %ecx,%ebp
- movl %eax,%esi
- shldl $5,%eax,%eax
- addl %ebp,%edi
- xorl %ecx,%esi
- shrdl $7,%ebx,%ebx
- addl %eax,%edi
- addl 48(%esp),%edx
- xorl %ebx,%esi
- movl %edi,%ebp
- shldl $5,%edi,%edi
- addl %esi,%edx
- xorl %ebx,%ebp
- shrdl $7,%eax,%eax
- addl %edi,%edx
- addl 52(%esp),%ecx
- xorl %eax,%ebp
- movl %edx,%esi
- shldl $5,%edx,%edx
- addl %ebp,%ecx
- xorl %eax,%esi
- shrdl $7,%edi,%edi
- addl %edx,%ecx
- addl 56(%esp),%ebx
- xorl %edi,%esi
- movl %ecx,%ebp
- shldl $5,%ecx,%ecx
- addl %esi,%ebx
- xorl %edi,%ebp
- shrdl $7,%edx,%edx
- addl %ecx,%ebx
- addl 60(%esp),%eax
- xorl %edx,%ebp
- movl %ebx,%esi
- shldl $5,%ebx,%ebx
- addl %ebp,%eax
- shrdl $7,%ecx,%ecx
- addl %ebx,%eax
- vzeroall
- movl 192(%esp),%ebp
- addl (%ebp),%eax
- movl 204(%esp),%esp
- addl 4(%ebp),%esi
- addl 8(%ebp),%ecx
- movl %eax,(%ebp)
- addl 12(%ebp),%edx
- movl %esi,4(%ebp)
- addl 16(%ebp),%edi
- movl %ecx,8(%ebp)
- movl %edx,12(%ebp)
- movl %edi,16(%ebp)
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.size _sha1_block_data_order_avx,.-_sha1_block_data_order_avx
.align 64
.LK_XX_XX:
.long 1518500249,1518500249,1518500249,1518500249
diff --git a/secure/lib/libcrypto/i386/sha256-586.S b/secure/lib/libcrypto/i386/sha256-586.S
index 7b4205352bdf9..5d8476c1e1bb1 100644
--- a/secure/lib/libcrypto/i386/sha256-586.S
+++ b/secure/lib/libcrypto/i386/sha256-586.S
@@ -42,13 +42,12 @@ sha256_block_data_order:
orl %ebx,%ecx
andl $1342177280,%ecx
cmpl $1342177280,%ecx
- je .L005AVX
testl $512,%ebx
- jnz .L006SSSE3
+ jnz .L005SSSE3
.L003no_xmm:
subl %edi,%eax
cmpl $256,%eax
- jae .L007unrolled
+ jae .L006unrolled
jmp .L002loop
.align 16
.L002loop:
@@ -120,7 +119,7 @@ sha256_block_data_order:
movl %ecx,28(%esp)
movl %edi,32(%esp)
.align 16
-.L00800_15:
+.L00700_15:
movl %edx,%ecx
movl 24(%esp),%esi
rorl $14,%ecx
@@ -158,11 +157,11 @@ sha256_block_data_order:
addl $4,%ebp
addl %ebx,%eax
cmpl $3248222580,%esi
- jne .L00800_15
+ jne .L00700_15
movl 156(%esp),%ecx
- jmp .L00916_63
+ jmp .L00816_63
.align 16
-.L00916_63:
+.L00816_63:
movl %ecx,%ebx
movl 104(%esp),%esi
rorl $11,%ecx
@@ -217,7 +216,7 @@ sha256_block_data_order:
addl $4,%ebp
addl %ebx,%eax
cmpl $3329325298,%esi
- jne .L00916_63
+ jne .L00816_63
movl 356(%esp),%esi
movl 8(%esp),%ebx
movl 16(%esp),%ecx
@@ -261,7 +260,7 @@ sha256_block_data_order:
.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
.byte 62,0
.align 16
-.L007unrolled:
+.L006unrolled:
leal -96(%esp),%esp
movl (%esi),%eax
movl 4(%esi),%ebp
@@ -278,9 +277,9 @@ sha256_block_data_order:
movl %ebx,20(%esp)
movl %ecx,24(%esp)
movl %esi,28(%esp)
- jmp .L010grand_loop
+ jmp .L009grand_loop
.align 16
-.L010grand_loop:
+.L009grand_loop:
movl (%edi),%ebx
movl 4(%edi),%ecx
bswap %ebx
@@ -3160,7 +3159,7 @@ sha256_block_data_order:
movl %ebx,24(%esp)
movl %ecx,28(%esp)
cmpl 104(%esp),%edi
- jb .L010grand_loop
+ jb .L009grand_loop
movl 108(%esp),%esp
popl %edi
popl %esi
@@ -3179,9 +3178,9 @@ sha256_block_data_order:
pshufd $27,%xmm2,%xmm2
.byte 102,15,58,15,202,8
punpcklqdq %xmm0,%xmm2
- jmp .L011loop_shaext
+ jmp .L010loop_shaext
.align 16
-.L011loop_shaext:
+.L010loop_shaext:
movdqu (%edi),%xmm3
movdqu 16(%edi),%xmm4
movdqu 32(%edi),%xmm5
@@ -3351,7 +3350,7 @@ sha256_block_data_order:
.byte 15,56,203,202
paddd 16(%esp),%xmm2
paddd (%esp),%xmm1
- jnz .L011loop_shaext
+ jnz .L010loop_shaext
pshufd $177,%xmm2,%xmm2
pshufd $27,%xmm1,%xmm7
pshufd $177,%xmm1,%xmm1
@@ -3366,7 +3365,7 @@ sha256_block_data_order:
popl %ebp
ret
.align 32
-.L006SSSE3:
+.L005SSSE3:
leal -96(%esp),%esp
movl (%esi),%eax
movl 4(%esi),%ebx
@@ -3385,9 +3384,9 @@ sha256_block_data_order:
movl %ecx,24(%esp)
movl %esi,28(%esp)
movdqa 256(%ebp),%xmm7
- jmp .L012grand_ssse3
+ jmp .L011grand_ssse3
.align 16
-.L012grand_ssse3:
+.L011grand_ssse3:
movdqu (%edi),%xmm0
movdqu 16(%edi),%xmm1
movdqu 32(%edi),%xmm2
@@ -3410,9 +3409,9 @@ sha256_block_data_order:
paddd %xmm3,%xmm7
movdqa %xmm6,64(%esp)
movdqa %xmm7,80(%esp)
- jmp .L013ssse3_00_47
+ jmp .L012ssse3_00_47
.align 16
-.L013ssse3_00_47:
+.L012ssse3_00_47:
addl $64,%ebp
movl %edx,%ecx
movdqa %xmm1,%xmm4
@@ -4055,7 +4054,7 @@ sha256_block_data_order:
addl %ecx,%eax
movdqa %xmm6,80(%esp)
cmpl $66051,64(%ebp)
- jne .L013ssse3_00_47
+ jne .L012ssse3_00_47
movl %edx,%ecx
rorl $14,%edx
movl 20(%esp),%esi
@@ -4569,2218 +4568,13 @@ sha256_block_data_order:
movdqa 64(%ebp),%xmm7
subl $192,%ebp
cmpl 104(%esp),%edi
- jb .L012grand_ssse3
+ jb .L011grand_ssse3
movl 108(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
-.align 32
-.L005AVX:
- andl $264,%edx
- cmpl $264,%edx
- je .L014AVX_BMI
- leal -96(%esp),%esp
- vzeroall
- movl (%esi),%eax
- movl 4(%esi),%ebx
- movl 8(%esi),%ecx
- movl 12(%esi),%edi
- movl %ebx,4(%esp)
- xorl %ecx,%ebx
- movl %ecx,8(%esp)
- movl %edi,12(%esp)
- movl 16(%esi),%edx
- movl 20(%esi),%edi
- movl 24(%esi),%ecx
- movl 28(%esi),%esi
- movl %edi,20(%esp)
- movl 100(%esp),%edi
- movl %ecx,24(%esp)
- movl %esi,28(%esp)
- vmovdqa 256(%ebp),%xmm7
- jmp .L015grand_avx
-.align 32
-.L015grand_avx:
- vmovdqu (%edi),%xmm0
- vmovdqu 16(%edi),%xmm1
- vmovdqu 32(%edi),%xmm2
- vmovdqu 48(%edi),%xmm3
- addl $64,%edi
- vpshufb %xmm7,%xmm0,%xmm0
- movl %edi,100(%esp)
- vpshufb %xmm7,%xmm1,%xmm1
- vpshufb %xmm7,%xmm2,%xmm2
- vpaddd (%ebp),%xmm0,%xmm4
- vpshufb %xmm7,%xmm3,%xmm3
- vpaddd 16(%ebp),%xmm1,%xmm5
- vpaddd 32(%ebp),%xmm2,%xmm6
- vpaddd 48(%ebp),%xmm3,%xmm7
- vmovdqa %xmm4,32(%esp)
- vmovdqa %xmm5,48(%esp)
- vmovdqa %xmm6,64(%esp)
- vmovdqa %xmm7,80(%esp)
- jmp .L016avx_00_47
-.align 16
-.L016avx_00_47:
- addl $64,%ebp
- vpalignr $4,%xmm0,%xmm1,%xmm4
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 20(%esp),%esi
- vpalignr $4,%xmm2,%xmm3,%xmm7
- xorl %ecx,%edx
- movl 24(%esp),%edi
- xorl %edi,%esi
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,16(%esp)
- vpaddd %xmm7,%xmm0,%xmm0
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrld $3,%xmm4,%xmm7
- movl %eax,%ecx
- addl %edi,%edx
- movl 4(%esp),%edi
- vpslld $14,%xmm4,%xmm5
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,(%esp)
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- vpshufd $250,%xmm3,%xmm7
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpsrld $11,%xmm6,%xmm6
- addl 32(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpxor %xmm5,%xmm4,%xmm4
- addl %edx,%ebx
- addl 12(%esp),%edx
- addl %ecx,%ebx
- vpslld $11,%xmm5,%xmm5
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 16(%esp),%esi
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ecx,%edx
- movl 20(%esp),%edi
- xorl %edi,%esi
- vpsrld $10,%xmm7,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,12(%esp)
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %ebx,%ecx
- addl %edi,%edx
- movl (%esp),%edi
- vpaddd %xmm4,%xmm0,%xmm0
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,28(%esp)
- vpxor %xmm5,%xmm6,%xmm6
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- vpsrlq $19,%xmm7,%xmm7
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- addl 36(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- vpshufd $132,%xmm6,%xmm7
- addl %edx,%eax
- addl 8(%esp),%edx
- addl %ecx,%eax
- vpsrldq $8,%xmm7,%xmm7
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 12(%esp),%esi
- vpaddd %xmm7,%xmm0,%xmm0
- xorl %ecx,%edx
- movl 16(%esp),%edi
- xorl %edi,%esi
- vpshufd $80,%xmm0,%xmm7
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,8(%esp)
- vpsrld $10,%xmm7,%xmm6
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %eax,%ecx
- addl %edi,%edx
- movl 28(%esp),%edi
- vpxor %xmm5,%xmm6,%xmm6
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,24(%esp)
- vpsrlq $19,%xmm7,%xmm7
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpshufd $232,%xmm6,%xmm7
- addl 40(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpslldq $8,%xmm7,%xmm7
- addl %edx,%ebx
- addl 4(%esp),%edx
- addl %ecx,%ebx
- vpaddd %xmm7,%xmm0,%xmm0
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 8(%esp),%esi
- vpaddd (%ebp),%xmm0,%xmm6
- xorl %ecx,%edx
- movl 12(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,4(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 24(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,20(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 44(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl (%esp),%edx
- addl %ecx,%eax
- vmovdqa %xmm6,32(%esp)
- vpalignr $4,%xmm1,%xmm2,%xmm4
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 4(%esp),%esi
- vpalignr $4,%xmm3,%xmm0,%xmm7
- xorl %ecx,%edx
- movl 8(%esp),%edi
- xorl %edi,%esi
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,(%esp)
- vpaddd %xmm7,%xmm1,%xmm1
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrld $3,%xmm4,%xmm7
- movl %eax,%ecx
- addl %edi,%edx
- movl 20(%esp),%edi
- vpslld $14,%xmm4,%xmm5
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,16(%esp)
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- vpshufd $250,%xmm0,%xmm7
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpsrld $11,%xmm6,%xmm6
- addl 48(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpxor %xmm5,%xmm4,%xmm4
- addl %edx,%ebx
- addl 28(%esp),%edx
- addl %ecx,%ebx
- vpslld $11,%xmm5,%xmm5
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl (%esp),%esi
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ecx,%edx
- movl 4(%esp),%edi
- xorl %edi,%esi
- vpsrld $10,%xmm7,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,28(%esp)
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %ebx,%ecx
- addl %edi,%edx
- movl 16(%esp),%edi
- vpaddd %xmm4,%xmm1,%xmm1
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,12(%esp)
- vpxor %xmm5,%xmm6,%xmm6
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- vpsrlq $19,%xmm7,%xmm7
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- addl 52(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- vpshufd $132,%xmm6,%xmm7
- addl %edx,%eax
- addl 24(%esp),%edx
- addl %ecx,%eax
- vpsrldq $8,%xmm7,%xmm7
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 28(%esp),%esi
- vpaddd %xmm7,%xmm1,%xmm1
- xorl %ecx,%edx
- movl (%esp),%edi
- xorl %edi,%esi
- vpshufd $80,%xmm1,%xmm7
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,24(%esp)
- vpsrld $10,%xmm7,%xmm6
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %eax,%ecx
- addl %edi,%edx
- movl 12(%esp),%edi
- vpxor %xmm5,%xmm6,%xmm6
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,8(%esp)
- vpsrlq $19,%xmm7,%xmm7
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpshufd $232,%xmm6,%xmm7
- addl 56(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpslldq $8,%xmm7,%xmm7
- addl %edx,%ebx
- addl 20(%esp),%edx
- addl %ecx,%ebx
- vpaddd %xmm7,%xmm1,%xmm1
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 24(%esp),%esi
- vpaddd 16(%ebp),%xmm1,%xmm6
- xorl %ecx,%edx
- movl 28(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,20(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 8(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,4(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 60(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 16(%esp),%edx
- addl %ecx,%eax
- vmovdqa %xmm6,48(%esp)
- vpalignr $4,%xmm2,%xmm3,%xmm4
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 20(%esp),%esi
- vpalignr $4,%xmm0,%xmm1,%xmm7
- xorl %ecx,%edx
- movl 24(%esp),%edi
- xorl %edi,%esi
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,16(%esp)
- vpaddd %xmm7,%xmm2,%xmm2
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrld $3,%xmm4,%xmm7
- movl %eax,%ecx
- addl %edi,%edx
- movl 4(%esp),%edi
- vpslld $14,%xmm4,%xmm5
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,(%esp)
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- vpshufd $250,%xmm1,%xmm7
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpsrld $11,%xmm6,%xmm6
- addl 64(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpxor %xmm5,%xmm4,%xmm4
- addl %edx,%ebx
- addl 12(%esp),%edx
- addl %ecx,%ebx
- vpslld $11,%xmm5,%xmm5
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 16(%esp),%esi
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ecx,%edx
- movl 20(%esp),%edi
- xorl %edi,%esi
- vpsrld $10,%xmm7,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,12(%esp)
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %ebx,%ecx
- addl %edi,%edx
- movl (%esp),%edi
- vpaddd %xmm4,%xmm2,%xmm2
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,28(%esp)
- vpxor %xmm5,%xmm6,%xmm6
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- vpsrlq $19,%xmm7,%xmm7
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- addl 68(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- vpshufd $132,%xmm6,%xmm7
- addl %edx,%eax
- addl 8(%esp),%edx
- addl %ecx,%eax
- vpsrldq $8,%xmm7,%xmm7
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 12(%esp),%esi
- vpaddd %xmm7,%xmm2,%xmm2
- xorl %ecx,%edx
- movl 16(%esp),%edi
- xorl %edi,%esi
- vpshufd $80,%xmm2,%xmm7
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,8(%esp)
- vpsrld $10,%xmm7,%xmm6
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %eax,%ecx
- addl %edi,%edx
- movl 28(%esp),%edi
- vpxor %xmm5,%xmm6,%xmm6
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,24(%esp)
- vpsrlq $19,%xmm7,%xmm7
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpshufd $232,%xmm6,%xmm7
- addl 72(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpslldq $8,%xmm7,%xmm7
- addl %edx,%ebx
- addl 4(%esp),%edx
- addl %ecx,%ebx
- vpaddd %xmm7,%xmm2,%xmm2
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 8(%esp),%esi
- vpaddd 32(%ebp),%xmm2,%xmm6
- xorl %ecx,%edx
- movl 12(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,4(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 24(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,20(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 76(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl (%esp),%edx
- addl %ecx,%eax
- vmovdqa %xmm6,64(%esp)
- vpalignr $4,%xmm3,%xmm0,%xmm4
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 4(%esp),%esi
- vpalignr $4,%xmm1,%xmm2,%xmm7
- xorl %ecx,%edx
- movl 8(%esp),%edi
- xorl %edi,%esi
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,(%esp)
- vpaddd %xmm7,%xmm3,%xmm3
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrld $3,%xmm4,%xmm7
- movl %eax,%ecx
- addl %edi,%edx
- movl 20(%esp),%edi
- vpslld $14,%xmm4,%xmm5
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,16(%esp)
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- vpshufd $250,%xmm2,%xmm7
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpsrld $11,%xmm6,%xmm6
- addl 80(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpxor %xmm5,%xmm4,%xmm4
- addl %edx,%ebx
- addl 28(%esp),%edx
- addl %ecx,%ebx
- vpslld $11,%xmm5,%xmm5
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl (%esp),%esi
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ecx,%edx
- movl 4(%esp),%edi
- xorl %edi,%esi
- vpsrld $10,%xmm7,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,28(%esp)
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %ebx,%ecx
- addl %edi,%edx
- movl 16(%esp),%edi
- vpaddd %xmm4,%xmm3,%xmm3
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,12(%esp)
- vpxor %xmm5,%xmm6,%xmm6
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- vpsrlq $19,%xmm7,%xmm7
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- addl 84(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- vpshufd $132,%xmm6,%xmm7
- addl %edx,%eax
- addl 24(%esp),%edx
- addl %ecx,%eax
- vpsrldq $8,%xmm7,%xmm7
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 28(%esp),%esi
- vpaddd %xmm7,%xmm3,%xmm3
- xorl %ecx,%edx
- movl (%esp),%edi
- xorl %edi,%esi
- vpshufd $80,%xmm3,%xmm7
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,24(%esp)
- vpsrld $10,%xmm7,%xmm6
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %eax,%ecx
- addl %edi,%edx
- movl 12(%esp),%edi
- vpxor %xmm5,%xmm6,%xmm6
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,8(%esp)
- vpsrlq $19,%xmm7,%xmm7
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpshufd $232,%xmm6,%xmm7
- addl 88(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpslldq $8,%xmm7,%xmm7
- addl %edx,%ebx
- addl 20(%esp),%edx
- addl %ecx,%ebx
- vpaddd %xmm7,%xmm3,%xmm3
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 24(%esp),%esi
- vpaddd 48(%ebp),%xmm3,%xmm6
- xorl %ecx,%edx
- movl 28(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,20(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 8(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,4(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 92(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 16(%esp),%edx
- addl %ecx,%eax
- vmovdqa %xmm6,80(%esp)
- cmpl $66051,64(%ebp)
- jne .L016avx_00_47
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 20(%esp),%esi
- xorl %ecx,%edx
- movl 24(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,16(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 4(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 32(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 12(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 16(%esp),%esi
- xorl %ecx,%edx
- movl 20(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,12(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl (%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,28(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 36(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 8(%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 12(%esp),%esi
- xorl %ecx,%edx
- movl 16(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,8(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 28(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,24(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 40(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 4(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 8(%esp),%esi
- xorl %ecx,%edx
- movl 12(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,4(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 24(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,20(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 44(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl (%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 4(%esp),%esi
- xorl %ecx,%edx
- movl 8(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 20(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,16(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 48(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 28(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl (%esp),%esi
- xorl %ecx,%edx
- movl 4(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,28(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 16(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,12(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 52(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 24(%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 28(%esp),%esi
- xorl %ecx,%edx
- movl (%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,24(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 12(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,8(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 56(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 20(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 24(%esp),%esi
- xorl %ecx,%edx
- movl 28(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,20(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 8(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,4(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 60(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 16(%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 20(%esp),%esi
- xorl %ecx,%edx
- movl 24(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,16(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 4(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 64(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 12(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 16(%esp),%esi
- xorl %ecx,%edx
- movl 20(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,12(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl (%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,28(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 68(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 8(%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 12(%esp),%esi
- xorl %ecx,%edx
- movl 16(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,8(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 28(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,24(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 72(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 4(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 8(%esp),%esi
- xorl %ecx,%edx
- movl 12(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,4(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 24(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,20(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 76(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl (%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 4(%esp),%esi
- xorl %ecx,%edx
- movl 8(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 20(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,16(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 80(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 28(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl (%esp),%esi
- xorl %ecx,%edx
- movl 4(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,28(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 16(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,12(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 84(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 24(%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 28(%esp),%esi
- xorl %ecx,%edx
- movl (%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,24(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 12(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,8(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 88(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 20(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 24(%esp),%esi
- xorl %ecx,%edx
- movl 28(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,20(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 8(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,4(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 92(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 16(%esp),%edx
- addl %ecx,%eax
- movl 96(%esp),%esi
- xorl %edi,%ebx
- movl 12(%esp),%ecx
- addl (%esi),%eax
- addl 4(%esi),%ebx
- addl 8(%esi),%edi
- addl 12(%esi),%ecx
- movl %eax,(%esi)
- movl %ebx,4(%esi)
- movl %edi,8(%esi)
- movl %ecx,12(%esi)
- movl %ebx,4(%esp)
- xorl %edi,%ebx
- movl %edi,8(%esp)
- movl %ecx,12(%esp)
- movl 20(%esp),%edi
- movl 24(%esp),%ecx
- addl 16(%esi),%edx
- addl 20(%esi),%edi
- addl 24(%esi),%ecx
- movl %edx,16(%esi)
- movl %edi,20(%esi)
- movl %edi,20(%esp)
- movl 28(%esp),%edi
- movl %ecx,24(%esi)
- addl 28(%esi),%edi
- movl %ecx,24(%esp)
- movl %edi,28(%esi)
- movl %edi,28(%esp)
- movl 100(%esp),%edi
- vmovdqa 64(%ebp),%xmm7
- subl $192,%ebp
- cmpl 104(%esp),%edi
- jb .L015grand_avx
- movl 108(%esp),%esp
- vzeroall
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.align 32
-.L014AVX_BMI:
- leal -96(%esp),%esp
- vzeroall
- movl (%esi),%eax
- movl 4(%esi),%ebx
- movl 8(%esi),%ecx
- movl 12(%esi),%edi
- movl %ebx,4(%esp)
- xorl %ecx,%ebx
- movl %ecx,8(%esp)
- movl %edi,12(%esp)
- movl 16(%esi),%edx
- movl 20(%esi),%edi
- movl 24(%esi),%ecx
- movl 28(%esi),%esi
- movl %edi,20(%esp)
- movl 100(%esp),%edi
- movl %ecx,24(%esp)
- movl %esi,28(%esp)
- vmovdqa 256(%ebp),%xmm7
- jmp .L017grand_avx_bmi
-.align 32
-.L017grand_avx_bmi:
- vmovdqu (%edi),%xmm0
- vmovdqu 16(%edi),%xmm1
- vmovdqu 32(%edi),%xmm2
- vmovdqu 48(%edi),%xmm3
- addl $64,%edi
- vpshufb %xmm7,%xmm0,%xmm0
- movl %edi,100(%esp)
- vpshufb %xmm7,%xmm1,%xmm1
- vpshufb %xmm7,%xmm2,%xmm2
- vpaddd (%ebp),%xmm0,%xmm4
- vpshufb %xmm7,%xmm3,%xmm3
- vpaddd 16(%ebp),%xmm1,%xmm5
- vpaddd 32(%ebp),%xmm2,%xmm6
- vpaddd 48(%ebp),%xmm3,%xmm7
- vmovdqa %xmm4,32(%esp)
- vmovdqa %xmm5,48(%esp)
- vmovdqa %xmm6,64(%esp)
- vmovdqa %xmm7,80(%esp)
- jmp .L018avx_bmi_00_47
-.align 16
-.L018avx_bmi_00_47:
- addl $64,%ebp
- vpalignr $4,%xmm0,%xmm1,%xmm4
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,16(%esp)
- vpalignr $4,%xmm2,%xmm3,%xmm7
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 24(%esp),%edx,%esi
- vpsrld $7,%xmm4,%xmm6
- xorl %edi,%ecx
- andl 20(%esp),%edx
- movl %eax,(%esp)
- vpaddd %xmm7,%xmm0,%xmm0
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrld $3,%xmm4,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpslld $14,%xmm4,%xmm5
- movl 4(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpxor %xmm6,%xmm7,%xmm4
- addl 28(%esp),%edx
- andl %eax,%ebx
- addl 32(%esp),%edx
- vpshufd $250,%xmm3,%xmm7
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 12(%esp),%edx
- vpsrld $11,%xmm6,%xmm6
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl %edx,12(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpslld $11,%xmm5,%xmm5
- andnl 20(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 16(%esp),%edx
- vpxor %xmm6,%xmm4,%xmm4
- movl %ebx,28(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpsrld $10,%xmm7,%xmm6
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl (%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpsrlq $17,%xmm7,%xmm5
- addl 24(%esp),%edx
- andl %ebx,%eax
- addl 36(%esp),%edx
- vpaddd %xmm4,%xmm0,%xmm0
- xorl %edi,%eax
- addl %edx,%ecx
- addl 8(%esp),%edx
- vpxor %xmm5,%xmm6,%xmm6
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpsrlq $19,%xmm7,%xmm7
- movl %edx,8(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- andnl 16(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 12(%esp),%edx
- vpshufd $132,%xmm6,%xmm7
- movl %eax,24(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrldq $8,%xmm7,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpaddd %xmm7,%xmm0,%xmm0
- movl 28(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpshufd $80,%xmm0,%xmm7
- addl 20(%esp),%edx
- andl %eax,%ebx
- addl 40(%esp),%edx
- vpsrld $10,%xmm7,%xmm6
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 4(%esp),%edx
- vpsrlq $17,%xmm7,%xmm5
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm6,%xmm6
- movl %edx,4(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpsrlq $19,%xmm7,%xmm7
- andnl 12(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 8(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- movl %ebx,20(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpshufd $232,%xmm6,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpslldq $8,%xmm7,%xmm7
- movl 24(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpaddd %xmm7,%xmm0,%xmm0
- addl 16(%esp),%edx
- andl %ebx,%eax
- addl 44(%esp),%edx
- vpaddd (%ebp),%xmm0,%xmm6
- xorl %edi,%eax
- addl %edx,%ecx
- addl (%esp),%edx
- leal (%eax,%ecx,1),%eax
- vmovdqa %xmm6,32(%esp)
- vpalignr $4,%xmm1,%xmm2,%xmm4
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,(%esp)
- vpalignr $4,%xmm3,%xmm0,%xmm7
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 8(%esp),%edx,%esi
- vpsrld $7,%xmm4,%xmm6
- xorl %edi,%ecx
- andl 4(%esp),%edx
- movl %eax,16(%esp)
- vpaddd %xmm7,%xmm1,%xmm1
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrld $3,%xmm4,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpslld $14,%xmm4,%xmm5
- movl 20(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpxor %xmm6,%xmm7,%xmm4
- addl 12(%esp),%edx
- andl %eax,%ebx
- addl 48(%esp),%edx
- vpshufd $250,%xmm0,%xmm7
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 28(%esp),%edx
- vpsrld $11,%xmm6,%xmm6
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl %edx,28(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpslld $11,%xmm5,%xmm5
- andnl 4(%esp),%edx,%esi
- xorl %edi,%ecx
- andl (%esp),%edx
- vpxor %xmm6,%xmm4,%xmm4
- movl %ebx,12(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpsrld $10,%xmm7,%xmm6
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl 16(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpsrlq $17,%xmm7,%xmm5
- addl 8(%esp),%edx
- andl %ebx,%eax
- addl 52(%esp),%edx
- vpaddd %xmm4,%xmm1,%xmm1
- xorl %edi,%eax
- addl %edx,%ecx
- addl 24(%esp),%edx
- vpxor %xmm5,%xmm6,%xmm6
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpsrlq $19,%xmm7,%xmm7
- movl %edx,24(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- andnl (%esp),%edx,%esi
- xorl %edi,%ecx
- andl 28(%esp),%edx
- vpshufd $132,%xmm6,%xmm7
- movl %eax,8(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrldq $8,%xmm7,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpaddd %xmm7,%xmm1,%xmm1
- movl 12(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpshufd $80,%xmm1,%xmm7
- addl 4(%esp),%edx
- andl %eax,%ebx
- addl 56(%esp),%edx
- vpsrld $10,%xmm7,%xmm6
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 20(%esp),%edx
- vpsrlq $17,%xmm7,%xmm5
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm6,%xmm6
- movl %edx,20(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpsrlq $19,%xmm7,%xmm7
- andnl 28(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 24(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- movl %ebx,4(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpshufd $232,%xmm6,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpslldq $8,%xmm7,%xmm7
- movl 8(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpaddd %xmm7,%xmm1,%xmm1
- addl (%esp),%edx
- andl %ebx,%eax
- addl 60(%esp),%edx
- vpaddd 16(%ebp),%xmm1,%xmm6
- xorl %edi,%eax
- addl %edx,%ecx
- addl 16(%esp),%edx
- leal (%eax,%ecx,1),%eax
- vmovdqa %xmm6,48(%esp)
- vpalignr $4,%xmm2,%xmm3,%xmm4
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,16(%esp)
- vpalignr $4,%xmm0,%xmm1,%xmm7
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 24(%esp),%edx,%esi
- vpsrld $7,%xmm4,%xmm6
- xorl %edi,%ecx
- andl 20(%esp),%edx
- movl %eax,(%esp)
- vpaddd %xmm7,%xmm2,%xmm2
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrld $3,%xmm4,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpslld $14,%xmm4,%xmm5
- movl 4(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpxor %xmm6,%xmm7,%xmm4
- addl 28(%esp),%edx
- andl %eax,%ebx
- addl 64(%esp),%edx
- vpshufd $250,%xmm1,%xmm7
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 12(%esp),%edx
- vpsrld $11,%xmm6,%xmm6
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl %edx,12(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpslld $11,%xmm5,%xmm5
- andnl 20(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 16(%esp),%edx
- vpxor %xmm6,%xmm4,%xmm4
- movl %ebx,28(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpsrld $10,%xmm7,%xmm6
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl (%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpsrlq $17,%xmm7,%xmm5
- addl 24(%esp),%edx
- andl %ebx,%eax
- addl 68(%esp),%edx
- vpaddd %xmm4,%xmm2,%xmm2
- xorl %edi,%eax
- addl %edx,%ecx
- addl 8(%esp),%edx
- vpxor %xmm5,%xmm6,%xmm6
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpsrlq $19,%xmm7,%xmm7
- movl %edx,8(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- andnl 16(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 12(%esp),%edx
- vpshufd $132,%xmm6,%xmm7
- movl %eax,24(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrldq $8,%xmm7,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpaddd %xmm7,%xmm2,%xmm2
- movl 28(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpshufd $80,%xmm2,%xmm7
- addl 20(%esp),%edx
- andl %eax,%ebx
- addl 72(%esp),%edx
- vpsrld $10,%xmm7,%xmm6
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 4(%esp),%edx
- vpsrlq $17,%xmm7,%xmm5
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm6,%xmm6
- movl %edx,4(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpsrlq $19,%xmm7,%xmm7
- andnl 12(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 8(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- movl %ebx,20(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpshufd $232,%xmm6,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpslldq $8,%xmm7,%xmm7
- movl 24(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpaddd %xmm7,%xmm2,%xmm2
- addl 16(%esp),%edx
- andl %ebx,%eax
- addl 76(%esp),%edx
- vpaddd 32(%ebp),%xmm2,%xmm6
- xorl %edi,%eax
- addl %edx,%ecx
- addl (%esp),%edx
- leal (%eax,%ecx,1),%eax
- vmovdqa %xmm6,64(%esp)
- vpalignr $4,%xmm3,%xmm0,%xmm4
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,(%esp)
- vpalignr $4,%xmm1,%xmm2,%xmm7
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 8(%esp),%edx,%esi
- vpsrld $7,%xmm4,%xmm6
- xorl %edi,%ecx
- andl 4(%esp),%edx
- movl %eax,16(%esp)
- vpaddd %xmm7,%xmm3,%xmm3
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrld $3,%xmm4,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpslld $14,%xmm4,%xmm5
- movl 20(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpxor %xmm6,%xmm7,%xmm4
- addl 12(%esp),%edx
- andl %eax,%ebx
- addl 80(%esp),%edx
- vpshufd $250,%xmm2,%xmm7
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 28(%esp),%edx
- vpsrld $11,%xmm6,%xmm6
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl %edx,28(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpslld $11,%xmm5,%xmm5
- andnl 4(%esp),%edx,%esi
- xorl %edi,%ecx
- andl (%esp),%edx
- vpxor %xmm6,%xmm4,%xmm4
- movl %ebx,12(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpsrld $10,%xmm7,%xmm6
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl 16(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpsrlq $17,%xmm7,%xmm5
- addl 8(%esp),%edx
- andl %ebx,%eax
- addl 84(%esp),%edx
- vpaddd %xmm4,%xmm3,%xmm3
- xorl %edi,%eax
- addl %edx,%ecx
- addl 24(%esp),%edx
- vpxor %xmm5,%xmm6,%xmm6
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpsrlq $19,%xmm7,%xmm7
- movl %edx,24(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- andnl (%esp),%edx,%esi
- xorl %edi,%ecx
- andl 28(%esp),%edx
- vpshufd $132,%xmm6,%xmm7
- movl %eax,8(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrldq $8,%xmm7,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpaddd %xmm7,%xmm3,%xmm3
- movl 12(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpshufd $80,%xmm3,%xmm7
- addl 4(%esp),%edx
- andl %eax,%ebx
- addl 88(%esp),%edx
- vpsrld $10,%xmm7,%xmm6
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 20(%esp),%edx
- vpsrlq $17,%xmm7,%xmm5
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm6,%xmm6
- movl %edx,20(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpsrlq $19,%xmm7,%xmm7
- andnl 28(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 24(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- movl %ebx,4(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpshufd $232,%xmm6,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpslldq $8,%xmm7,%xmm7
- movl 8(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpaddd %xmm7,%xmm3,%xmm3
- addl (%esp),%edx
- andl %ebx,%eax
- addl 92(%esp),%edx
- vpaddd 48(%ebp),%xmm3,%xmm6
- xorl %edi,%eax
- addl %edx,%ecx
- addl 16(%esp),%edx
- leal (%eax,%ecx,1),%eax
- vmovdqa %xmm6,80(%esp)
- cmpl $66051,64(%ebp)
- jne .L018avx_bmi_00_47
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,16(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 24(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 20(%esp),%edx
- movl %eax,(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 4(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- andl %eax,%ebx
- addl 32(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 12(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,12(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 20(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 16(%esp),%edx
- movl %ebx,28(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl (%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- andl %ebx,%eax
- addl 36(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 8(%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,8(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 16(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 12(%esp),%edx
- movl %eax,24(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 28(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- andl %eax,%ebx
- addl 40(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 4(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,4(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 12(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 8(%esp),%edx
- movl %ebx,20(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 24(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- andl %ebx,%eax
- addl 44(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl (%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 8(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 4(%esp),%edx
- movl %eax,16(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 20(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- andl %eax,%ebx
- addl 48(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 28(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,28(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 4(%esp),%edx,%esi
- xorl %edi,%ecx
- andl (%esp),%edx
- movl %ebx,12(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 16(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- andl %ebx,%eax
- addl 52(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 24(%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,24(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl (%esp),%edx,%esi
- xorl %edi,%ecx
- andl 28(%esp),%edx
- movl %eax,8(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 12(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- andl %eax,%ebx
- addl 56(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 20(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,20(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 28(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 24(%esp),%edx
- movl %ebx,4(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 8(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- andl %ebx,%eax
- addl 60(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 16(%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,16(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 24(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 20(%esp),%edx
- movl %eax,(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 4(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- andl %eax,%ebx
- addl 64(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 12(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,12(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 20(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 16(%esp),%edx
- movl %ebx,28(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl (%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- andl %ebx,%eax
- addl 68(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 8(%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,8(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 16(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 12(%esp),%edx
- movl %eax,24(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 28(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- andl %eax,%ebx
- addl 72(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 4(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,4(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 12(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 8(%esp),%edx
- movl %ebx,20(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 24(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- andl %ebx,%eax
- addl 76(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl (%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 8(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 4(%esp),%edx
- movl %eax,16(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 20(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- andl %eax,%ebx
- addl 80(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 28(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,28(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 4(%esp),%edx,%esi
- xorl %edi,%ecx
- andl (%esp),%edx
- movl %ebx,12(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 16(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- andl %ebx,%eax
- addl 84(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 24(%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,24(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl (%esp),%edx,%esi
- xorl %edi,%ecx
- andl 28(%esp),%edx
- movl %eax,8(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 12(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- andl %eax,%ebx
- addl 88(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 20(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,20(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 28(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 24(%esp),%edx
- movl %ebx,4(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 8(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- andl %ebx,%eax
- addl 92(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 16(%esp),%edx
- leal (%eax,%ecx,1),%eax
- movl 96(%esp),%esi
- xorl %edi,%ebx
- movl 12(%esp),%ecx
- addl (%esi),%eax
- addl 4(%esi),%ebx
- addl 8(%esi),%edi
- addl 12(%esi),%ecx
- movl %eax,(%esi)
- movl %ebx,4(%esi)
- movl %edi,8(%esi)
- movl %ecx,12(%esi)
- movl %ebx,4(%esp)
- xorl %edi,%ebx
- movl %edi,8(%esp)
- movl %ecx,12(%esp)
- movl 20(%esp),%edi
- movl 24(%esp),%ecx
- addl 16(%esi),%edx
- addl 20(%esi),%edi
- addl 24(%esi),%ecx
- movl %edx,16(%esi)
- movl %edi,20(%esi)
- movl %edi,20(%esp)
- movl 28(%esp),%edi
- movl %ecx,24(%esi)
- addl 28(%esi),%edi
- movl %ecx,24(%esp)
- movl %edi,28(%esi)
- movl %edi,28(%esp)
- movl 100(%esp),%edi
- vmovdqa 64(%ebp),%xmm7
- subl $192,%ebp
- cmpl 104(%esp),%edi
- jb .L017grand_avx_bmi
- movl 108(%esp),%esp
- vzeroall
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
.size sha256_block_data_order,.-.L_sha256_block_data_order_begin
.comm OPENSSL_ia32cap_P,16,4
#else
@@ -6825,13 +4619,12 @@ sha256_block_data_order:
orl %ebx,%ecx
andl $1342177280,%ecx
cmpl $1342177280,%ecx
- je .L005AVX
testl $512,%ebx
- jnz .L006SSSE3
+ jnz .L005SSSE3
.L003no_xmm:
subl %edi,%eax
cmpl $256,%eax
- jae .L007unrolled
+ jae .L006unrolled
jmp .L002loop
.align 16
.L002loop:
@@ -6903,7 +4696,7 @@ sha256_block_data_order:
movl %ecx,28(%esp)
movl %edi,32(%esp)
.align 16
-.L00800_15:
+.L00700_15:
movl %edx,%ecx
movl 24(%esp),%esi
rorl $14,%ecx
@@ -6941,11 +4734,11 @@ sha256_block_data_order:
addl $4,%ebp
addl %ebx,%eax
cmpl $3248222580,%esi
- jne .L00800_15
+ jne .L00700_15
movl 156(%esp),%ecx
- jmp .L00916_63
+ jmp .L00816_63
.align 16
-.L00916_63:
+.L00816_63:
movl %ecx,%ebx
movl 104(%esp),%esi
rorl $11,%ecx
@@ -7000,7 +4793,7 @@ sha256_block_data_order:
addl $4,%ebp
addl %ebx,%eax
cmpl $3329325298,%esi
- jne .L00916_63
+ jne .L00816_63
movl 356(%esp),%esi
movl 8(%esp),%ebx
movl 16(%esp),%ecx
@@ -7044,7 +4837,7 @@ sha256_block_data_order:
.byte 112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103
.byte 62,0
.align 16
-.L007unrolled:
+.L006unrolled:
leal -96(%esp),%esp
movl (%esi),%eax
movl 4(%esi),%ebp
@@ -7061,9 +4854,9 @@ sha256_block_data_order:
movl %ebx,20(%esp)
movl %ecx,24(%esp)
movl %esi,28(%esp)
- jmp .L010grand_loop
+ jmp .L009grand_loop
.align 16
-.L010grand_loop:
+.L009grand_loop:
movl (%edi),%ebx
movl 4(%edi),%ecx
bswap %ebx
@@ -9943,7 +7736,7 @@ sha256_block_data_order:
movl %ebx,24(%esp)
movl %ecx,28(%esp)
cmpl 104(%esp),%edi
- jb .L010grand_loop
+ jb .L009grand_loop
movl 108(%esp),%esp
popl %edi
popl %esi
@@ -9962,9 +7755,9 @@ sha256_block_data_order:
pshufd $27,%xmm2,%xmm2
.byte 102,15,58,15,202,8
punpcklqdq %xmm0,%xmm2
- jmp .L011loop_shaext
+ jmp .L010loop_shaext
.align 16
-.L011loop_shaext:
+.L010loop_shaext:
movdqu (%edi),%xmm3
movdqu 16(%edi),%xmm4
movdqu 32(%edi),%xmm5
@@ -10134,7 +7927,7 @@ sha256_block_data_order:
.byte 15,56,203,202
paddd 16(%esp),%xmm2
paddd (%esp),%xmm1
- jnz .L011loop_shaext
+ jnz .L010loop_shaext
pshufd $177,%xmm2,%xmm2
pshufd $27,%xmm1,%xmm7
pshufd $177,%xmm1,%xmm1
@@ -10149,7 +7942,7 @@ sha256_block_data_order:
popl %ebp
ret
.align 32
-.L006SSSE3:
+.L005SSSE3:
leal -96(%esp),%esp
movl (%esi),%eax
movl 4(%esi),%ebx
@@ -10168,9 +7961,9 @@ sha256_block_data_order:
movl %ecx,24(%esp)
movl %esi,28(%esp)
movdqa 256(%ebp),%xmm7
- jmp .L012grand_ssse3
+ jmp .L011grand_ssse3
.align 16
-.L012grand_ssse3:
+.L011grand_ssse3:
movdqu (%edi),%xmm0
movdqu 16(%edi),%xmm1
movdqu 32(%edi),%xmm2
@@ -10193,9 +7986,9 @@ sha256_block_data_order:
paddd %xmm3,%xmm7
movdqa %xmm6,64(%esp)
movdqa %xmm7,80(%esp)
- jmp .L013ssse3_00_47
+ jmp .L012ssse3_00_47
.align 16
-.L013ssse3_00_47:
+.L012ssse3_00_47:
addl $64,%ebp
movl %edx,%ecx
movdqa %xmm1,%xmm4
@@ -10838,7 +8631,7 @@ sha256_block_data_order:
addl %ecx,%eax
movdqa %xmm6,80(%esp)
cmpl $66051,64(%ebp)
- jne .L013ssse3_00_47
+ jne .L012ssse3_00_47
movl %edx,%ecx
rorl $14,%edx
movl 20(%esp),%esi
@@ -11352,2213 +9145,8 @@ sha256_block_data_order:
movdqa 64(%ebp),%xmm7
subl $192,%ebp
cmpl 104(%esp),%edi
- jb .L012grand_ssse3
- movl 108(%esp),%esp
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.align 32
-.L005AVX:
- andl $264,%edx
- cmpl $264,%edx
- je .L014AVX_BMI
- leal -96(%esp),%esp
- vzeroall
- movl (%esi),%eax
- movl 4(%esi),%ebx
- movl 8(%esi),%ecx
- movl 12(%esi),%edi
- movl %ebx,4(%esp)
- xorl %ecx,%ebx
- movl %ecx,8(%esp)
- movl %edi,12(%esp)
- movl 16(%esi),%edx
- movl 20(%esi),%edi
- movl 24(%esi),%ecx
- movl 28(%esi),%esi
- movl %edi,20(%esp)
- movl 100(%esp),%edi
- movl %ecx,24(%esp)
- movl %esi,28(%esp)
- vmovdqa 256(%ebp),%xmm7
- jmp .L015grand_avx
-.align 32
-.L015grand_avx:
- vmovdqu (%edi),%xmm0
- vmovdqu 16(%edi),%xmm1
- vmovdqu 32(%edi),%xmm2
- vmovdqu 48(%edi),%xmm3
- addl $64,%edi
- vpshufb %xmm7,%xmm0,%xmm0
- movl %edi,100(%esp)
- vpshufb %xmm7,%xmm1,%xmm1
- vpshufb %xmm7,%xmm2,%xmm2
- vpaddd (%ebp),%xmm0,%xmm4
- vpshufb %xmm7,%xmm3,%xmm3
- vpaddd 16(%ebp),%xmm1,%xmm5
- vpaddd 32(%ebp),%xmm2,%xmm6
- vpaddd 48(%ebp),%xmm3,%xmm7
- vmovdqa %xmm4,32(%esp)
- vmovdqa %xmm5,48(%esp)
- vmovdqa %xmm6,64(%esp)
- vmovdqa %xmm7,80(%esp)
- jmp .L016avx_00_47
-.align 16
-.L016avx_00_47:
- addl $64,%ebp
- vpalignr $4,%xmm0,%xmm1,%xmm4
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 20(%esp),%esi
- vpalignr $4,%xmm2,%xmm3,%xmm7
- xorl %ecx,%edx
- movl 24(%esp),%edi
- xorl %edi,%esi
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,16(%esp)
- vpaddd %xmm7,%xmm0,%xmm0
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrld $3,%xmm4,%xmm7
- movl %eax,%ecx
- addl %edi,%edx
- movl 4(%esp),%edi
- vpslld $14,%xmm4,%xmm5
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,(%esp)
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- vpshufd $250,%xmm3,%xmm7
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpsrld $11,%xmm6,%xmm6
- addl 32(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpxor %xmm5,%xmm4,%xmm4
- addl %edx,%ebx
- addl 12(%esp),%edx
- addl %ecx,%ebx
- vpslld $11,%xmm5,%xmm5
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 16(%esp),%esi
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ecx,%edx
- movl 20(%esp),%edi
- xorl %edi,%esi
- vpsrld $10,%xmm7,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,12(%esp)
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %ebx,%ecx
- addl %edi,%edx
- movl (%esp),%edi
- vpaddd %xmm4,%xmm0,%xmm0
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,28(%esp)
- vpxor %xmm5,%xmm6,%xmm6
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- vpsrlq $19,%xmm7,%xmm7
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- addl 36(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- vpshufd $132,%xmm6,%xmm7
- addl %edx,%eax
- addl 8(%esp),%edx
- addl %ecx,%eax
- vpsrldq $8,%xmm7,%xmm7
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 12(%esp),%esi
- vpaddd %xmm7,%xmm0,%xmm0
- xorl %ecx,%edx
- movl 16(%esp),%edi
- xorl %edi,%esi
- vpshufd $80,%xmm0,%xmm7
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,8(%esp)
- vpsrld $10,%xmm7,%xmm6
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %eax,%ecx
- addl %edi,%edx
- movl 28(%esp),%edi
- vpxor %xmm5,%xmm6,%xmm6
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,24(%esp)
- vpsrlq $19,%xmm7,%xmm7
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpshufd $232,%xmm6,%xmm7
- addl 40(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpslldq $8,%xmm7,%xmm7
- addl %edx,%ebx
- addl 4(%esp),%edx
- addl %ecx,%ebx
- vpaddd %xmm7,%xmm0,%xmm0
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 8(%esp),%esi
- vpaddd (%ebp),%xmm0,%xmm6
- xorl %ecx,%edx
- movl 12(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,4(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 24(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,20(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 44(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl (%esp),%edx
- addl %ecx,%eax
- vmovdqa %xmm6,32(%esp)
- vpalignr $4,%xmm1,%xmm2,%xmm4
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 4(%esp),%esi
- vpalignr $4,%xmm3,%xmm0,%xmm7
- xorl %ecx,%edx
- movl 8(%esp),%edi
- xorl %edi,%esi
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,(%esp)
- vpaddd %xmm7,%xmm1,%xmm1
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrld $3,%xmm4,%xmm7
- movl %eax,%ecx
- addl %edi,%edx
- movl 20(%esp),%edi
- vpslld $14,%xmm4,%xmm5
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,16(%esp)
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- vpshufd $250,%xmm0,%xmm7
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpsrld $11,%xmm6,%xmm6
- addl 48(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpxor %xmm5,%xmm4,%xmm4
- addl %edx,%ebx
- addl 28(%esp),%edx
- addl %ecx,%ebx
- vpslld $11,%xmm5,%xmm5
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl (%esp),%esi
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ecx,%edx
- movl 4(%esp),%edi
- xorl %edi,%esi
- vpsrld $10,%xmm7,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,28(%esp)
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %ebx,%ecx
- addl %edi,%edx
- movl 16(%esp),%edi
- vpaddd %xmm4,%xmm1,%xmm1
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,12(%esp)
- vpxor %xmm5,%xmm6,%xmm6
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- vpsrlq $19,%xmm7,%xmm7
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- addl 52(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- vpshufd $132,%xmm6,%xmm7
- addl %edx,%eax
- addl 24(%esp),%edx
- addl %ecx,%eax
- vpsrldq $8,%xmm7,%xmm7
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 28(%esp),%esi
- vpaddd %xmm7,%xmm1,%xmm1
- xorl %ecx,%edx
- movl (%esp),%edi
- xorl %edi,%esi
- vpshufd $80,%xmm1,%xmm7
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,24(%esp)
- vpsrld $10,%xmm7,%xmm6
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %eax,%ecx
- addl %edi,%edx
- movl 12(%esp),%edi
- vpxor %xmm5,%xmm6,%xmm6
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,8(%esp)
- vpsrlq $19,%xmm7,%xmm7
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpshufd $232,%xmm6,%xmm7
- addl 56(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpslldq $8,%xmm7,%xmm7
- addl %edx,%ebx
- addl 20(%esp),%edx
- addl %ecx,%ebx
- vpaddd %xmm7,%xmm1,%xmm1
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 24(%esp),%esi
- vpaddd 16(%ebp),%xmm1,%xmm6
- xorl %ecx,%edx
- movl 28(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,20(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 8(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,4(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 60(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 16(%esp),%edx
- addl %ecx,%eax
- vmovdqa %xmm6,48(%esp)
- vpalignr $4,%xmm2,%xmm3,%xmm4
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 20(%esp),%esi
- vpalignr $4,%xmm0,%xmm1,%xmm7
- xorl %ecx,%edx
- movl 24(%esp),%edi
- xorl %edi,%esi
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,16(%esp)
- vpaddd %xmm7,%xmm2,%xmm2
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrld $3,%xmm4,%xmm7
- movl %eax,%ecx
- addl %edi,%edx
- movl 4(%esp),%edi
- vpslld $14,%xmm4,%xmm5
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,(%esp)
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- vpshufd $250,%xmm1,%xmm7
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpsrld $11,%xmm6,%xmm6
- addl 64(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpxor %xmm5,%xmm4,%xmm4
- addl %edx,%ebx
- addl 12(%esp),%edx
- addl %ecx,%ebx
- vpslld $11,%xmm5,%xmm5
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 16(%esp),%esi
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ecx,%edx
- movl 20(%esp),%edi
- xorl %edi,%esi
- vpsrld $10,%xmm7,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,12(%esp)
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %ebx,%ecx
- addl %edi,%edx
- movl (%esp),%edi
- vpaddd %xmm4,%xmm2,%xmm2
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,28(%esp)
- vpxor %xmm5,%xmm6,%xmm6
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- vpsrlq $19,%xmm7,%xmm7
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- addl 68(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- vpshufd $132,%xmm6,%xmm7
- addl %edx,%eax
- addl 8(%esp),%edx
- addl %ecx,%eax
- vpsrldq $8,%xmm7,%xmm7
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 12(%esp),%esi
- vpaddd %xmm7,%xmm2,%xmm2
- xorl %ecx,%edx
- movl 16(%esp),%edi
- xorl %edi,%esi
- vpshufd $80,%xmm2,%xmm7
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,8(%esp)
- vpsrld $10,%xmm7,%xmm6
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %eax,%ecx
- addl %edi,%edx
- movl 28(%esp),%edi
- vpxor %xmm5,%xmm6,%xmm6
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,24(%esp)
- vpsrlq $19,%xmm7,%xmm7
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpshufd $232,%xmm6,%xmm7
- addl 72(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpslldq $8,%xmm7,%xmm7
- addl %edx,%ebx
- addl 4(%esp),%edx
- addl %ecx,%ebx
- vpaddd %xmm7,%xmm2,%xmm2
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 8(%esp),%esi
- vpaddd 32(%ebp),%xmm2,%xmm6
- xorl %ecx,%edx
- movl 12(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,4(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 24(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,20(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 76(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl (%esp),%edx
- addl %ecx,%eax
- vmovdqa %xmm6,64(%esp)
- vpalignr $4,%xmm3,%xmm0,%xmm4
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 4(%esp),%esi
- vpalignr $4,%xmm1,%xmm2,%xmm7
- xorl %ecx,%edx
- movl 8(%esp),%edi
- xorl %edi,%esi
- vpsrld $7,%xmm4,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,(%esp)
- vpaddd %xmm7,%xmm3,%xmm3
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrld $3,%xmm4,%xmm7
- movl %eax,%ecx
- addl %edi,%edx
- movl 20(%esp),%edi
- vpslld $14,%xmm4,%xmm5
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,16(%esp)
- vpxor %xmm6,%xmm7,%xmm4
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- vpshufd $250,%xmm2,%xmm7
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpsrld $11,%xmm6,%xmm6
- addl 80(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpxor %xmm5,%xmm4,%xmm4
- addl %edx,%ebx
- addl 28(%esp),%edx
- addl %ecx,%ebx
- vpslld $11,%xmm5,%xmm5
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl (%esp),%esi
- vpxor %xmm6,%xmm4,%xmm4
- xorl %ecx,%edx
- movl 4(%esp),%edi
- xorl %edi,%esi
- vpsrld $10,%xmm7,%xmm6
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,28(%esp)
- vpxor %xmm5,%xmm4,%xmm4
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %ebx,%ecx
- addl %edi,%edx
- movl 16(%esp),%edi
- vpaddd %xmm4,%xmm3,%xmm3
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,12(%esp)
- vpxor %xmm5,%xmm6,%xmm6
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- vpsrlq $19,%xmm7,%xmm7
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- addl 84(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- vpshufd $132,%xmm6,%xmm7
- addl %edx,%eax
- addl 24(%esp),%edx
- addl %ecx,%eax
- vpsrldq $8,%xmm7,%xmm7
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 28(%esp),%esi
- vpaddd %xmm7,%xmm3,%xmm3
- xorl %ecx,%edx
- movl (%esp),%edi
- xorl %edi,%esi
- vpshufd $80,%xmm3,%xmm7
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,24(%esp)
- vpsrld $10,%xmm7,%xmm6
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- vpsrlq $17,%xmm7,%xmm5
- movl %eax,%ecx
- addl %edi,%edx
- movl 12(%esp),%edi
- vpxor %xmm5,%xmm6,%xmm6
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,8(%esp)
- vpsrlq $19,%xmm7,%xmm7
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- vpshufd $232,%xmm6,%xmm7
- addl 88(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- vpslldq $8,%xmm7,%xmm7
- addl %edx,%ebx
- addl 20(%esp),%edx
- addl %ecx,%ebx
- vpaddd %xmm7,%xmm3,%xmm3
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 24(%esp),%esi
- vpaddd 48(%ebp),%xmm3,%xmm6
- xorl %ecx,%edx
- movl 28(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,20(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 8(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,4(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 92(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 16(%esp),%edx
- addl %ecx,%eax
- vmovdqa %xmm6,80(%esp)
- cmpl $66051,64(%ebp)
- jne .L016avx_00_47
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 20(%esp),%esi
- xorl %ecx,%edx
- movl 24(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,16(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 4(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 32(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 12(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 16(%esp),%esi
- xorl %ecx,%edx
- movl 20(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,12(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl (%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,28(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 36(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 8(%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 12(%esp),%esi
- xorl %ecx,%edx
- movl 16(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,8(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 28(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,24(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 40(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 4(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 8(%esp),%esi
- xorl %ecx,%edx
- movl 12(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,4(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 24(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,20(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 44(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl (%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 4(%esp),%esi
- xorl %ecx,%edx
- movl 8(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 20(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,16(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 48(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 28(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl (%esp),%esi
- xorl %ecx,%edx
- movl 4(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,28(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 16(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,12(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 52(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 24(%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 28(%esp),%esi
- xorl %ecx,%edx
- movl (%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,24(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 12(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,8(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 56(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 20(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 24(%esp),%esi
- xorl %ecx,%edx
- movl 28(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,20(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 8(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,4(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 60(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 16(%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 20(%esp),%esi
- xorl %ecx,%edx
- movl 24(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,16(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 4(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 64(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 12(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 16(%esp),%esi
- xorl %ecx,%edx
- movl 20(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,12(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl (%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,28(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 68(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 8(%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 12(%esp),%esi
- xorl %ecx,%edx
- movl 16(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,8(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 28(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,24(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 72(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 4(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 8(%esp),%esi
- xorl %ecx,%edx
- movl 12(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,4(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 24(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,20(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 76(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl (%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 4(%esp),%esi
- xorl %ecx,%edx
- movl 8(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 20(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,16(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 80(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 28(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl (%esp),%esi
- xorl %ecx,%edx
- movl 4(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,28(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 16(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,12(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 84(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 24(%esp),%edx
- addl %ecx,%eax
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 28(%esp),%esi
- xorl %ecx,%edx
- movl (%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,24(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %eax,%ecx
- addl %edi,%edx
- movl 12(%esp),%edi
- movl %eax,%esi
- shrdl $9,%ecx,%ecx
- movl %eax,8(%esp)
- xorl %eax,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %eax,%ebx
- xorl %esi,%ecx
- addl 88(%esp),%edx
- xorl %edi,%ebx
- shrdl $2,%ecx,%ecx
- addl %edx,%ebx
- addl 20(%esp),%edx
- addl %ecx,%ebx
- movl %edx,%ecx
- shrdl $14,%edx,%edx
- movl 24(%esp),%esi
- xorl %ecx,%edx
- movl 28(%esp),%edi
- xorl %edi,%esi
- shrdl $5,%edx,%edx
- andl %ecx,%esi
- movl %ecx,20(%esp)
- xorl %ecx,%edx
- xorl %esi,%edi
- shrdl $6,%edx,%edx
- movl %ebx,%ecx
- addl %edi,%edx
- movl 8(%esp),%edi
- movl %ebx,%esi
- shrdl $9,%ecx,%ecx
- movl %ebx,4(%esp)
- xorl %ebx,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- shrdl $11,%ecx,%ecx
- andl %ebx,%eax
- xorl %esi,%ecx
- addl 92(%esp),%edx
- xorl %edi,%eax
- shrdl $2,%ecx,%ecx
- addl %edx,%eax
- addl 16(%esp),%edx
- addl %ecx,%eax
- movl 96(%esp),%esi
- xorl %edi,%ebx
- movl 12(%esp),%ecx
- addl (%esi),%eax
- addl 4(%esi),%ebx
- addl 8(%esi),%edi
- addl 12(%esi),%ecx
- movl %eax,(%esi)
- movl %ebx,4(%esi)
- movl %edi,8(%esi)
- movl %ecx,12(%esi)
- movl %ebx,4(%esp)
- xorl %edi,%ebx
- movl %edi,8(%esp)
- movl %ecx,12(%esp)
- movl 20(%esp),%edi
- movl 24(%esp),%ecx
- addl 16(%esi),%edx
- addl 20(%esi),%edi
- addl 24(%esi),%ecx
- movl %edx,16(%esi)
- movl %edi,20(%esi)
- movl %edi,20(%esp)
- movl 28(%esp),%edi
- movl %ecx,24(%esi)
- addl 28(%esi),%edi
- movl %ecx,24(%esp)
- movl %edi,28(%esi)
- movl %edi,28(%esp)
- movl 100(%esp),%edi
- vmovdqa 64(%ebp),%xmm7
- subl $192,%ebp
- cmpl 104(%esp),%edi
- jb .L015grand_avx
- movl 108(%esp),%esp
- vzeroall
- popl %edi
- popl %esi
- popl %ebx
- popl %ebp
- ret
-.align 32
-.L014AVX_BMI:
- leal -96(%esp),%esp
- vzeroall
- movl (%esi),%eax
- movl 4(%esi),%ebx
- movl 8(%esi),%ecx
- movl 12(%esi),%edi
- movl %ebx,4(%esp)
- xorl %ecx,%ebx
- movl %ecx,8(%esp)
- movl %edi,12(%esp)
- movl 16(%esi),%edx
- movl 20(%esi),%edi
- movl 24(%esi),%ecx
- movl 28(%esi),%esi
- movl %edi,20(%esp)
- movl 100(%esp),%edi
- movl %ecx,24(%esp)
- movl %esi,28(%esp)
- vmovdqa 256(%ebp),%xmm7
- jmp .L017grand_avx_bmi
-.align 32
-.L017grand_avx_bmi:
- vmovdqu (%edi),%xmm0
- vmovdqu 16(%edi),%xmm1
- vmovdqu 32(%edi),%xmm2
- vmovdqu 48(%edi),%xmm3
- addl $64,%edi
- vpshufb %xmm7,%xmm0,%xmm0
- movl %edi,100(%esp)
- vpshufb %xmm7,%xmm1,%xmm1
- vpshufb %xmm7,%xmm2,%xmm2
- vpaddd (%ebp),%xmm0,%xmm4
- vpshufb %xmm7,%xmm3,%xmm3
- vpaddd 16(%ebp),%xmm1,%xmm5
- vpaddd 32(%ebp),%xmm2,%xmm6
- vpaddd 48(%ebp),%xmm3,%xmm7
- vmovdqa %xmm4,32(%esp)
- vmovdqa %xmm5,48(%esp)
- vmovdqa %xmm6,64(%esp)
- vmovdqa %xmm7,80(%esp)
- jmp .L018avx_bmi_00_47
-.align 16
-.L018avx_bmi_00_47:
- addl $64,%ebp
- vpalignr $4,%xmm0,%xmm1,%xmm4
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,16(%esp)
- vpalignr $4,%xmm2,%xmm3,%xmm7
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 24(%esp),%edx,%esi
- vpsrld $7,%xmm4,%xmm6
- xorl %edi,%ecx
- andl 20(%esp),%edx
- movl %eax,(%esp)
- vpaddd %xmm7,%xmm0,%xmm0
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrld $3,%xmm4,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpslld $14,%xmm4,%xmm5
- movl 4(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpxor %xmm6,%xmm7,%xmm4
- addl 28(%esp),%edx
- andl %eax,%ebx
- addl 32(%esp),%edx
- vpshufd $250,%xmm3,%xmm7
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 12(%esp),%edx
- vpsrld $11,%xmm6,%xmm6
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl %edx,12(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpslld $11,%xmm5,%xmm5
- andnl 20(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 16(%esp),%edx
- vpxor %xmm6,%xmm4,%xmm4
- movl %ebx,28(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpsrld $10,%xmm7,%xmm6
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl (%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpsrlq $17,%xmm7,%xmm5
- addl 24(%esp),%edx
- andl %ebx,%eax
- addl 36(%esp),%edx
- vpaddd %xmm4,%xmm0,%xmm0
- xorl %edi,%eax
- addl %edx,%ecx
- addl 8(%esp),%edx
- vpxor %xmm5,%xmm6,%xmm6
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpsrlq $19,%xmm7,%xmm7
- movl %edx,8(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- andnl 16(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 12(%esp),%edx
- vpshufd $132,%xmm6,%xmm7
- movl %eax,24(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrldq $8,%xmm7,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpaddd %xmm7,%xmm0,%xmm0
- movl 28(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpshufd $80,%xmm0,%xmm7
- addl 20(%esp),%edx
- andl %eax,%ebx
- addl 40(%esp),%edx
- vpsrld $10,%xmm7,%xmm6
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 4(%esp),%edx
- vpsrlq $17,%xmm7,%xmm5
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm6,%xmm6
- movl %edx,4(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpsrlq $19,%xmm7,%xmm7
- andnl 12(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 8(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- movl %ebx,20(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpshufd $232,%xmm6,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpslldq $8,%xmm7,%xmm7
- movl 24(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpaddd %xmm7,%xmm0,%xmm0
- addl 16(%esp),%edx
- andl %ebx,%eax
- addl 44(%esp),%edx
- vpaddd (%ebp),%xmm0,%xmm6
- xorl %edi,%eax
- addl %edx,%ecx
- addl (%esp),%edx
- leal (%eax,%ecx,1),%eax
- vmovdqa %xmm6,32(%esp)
- vpalignr $4,%xmm1,%xmm2,%xmm4
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,(%esp)
- vpalignr $4,%xmm3,%xmm0,%xmm7
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 8(%esp),%edx,%esi
- vpsrld $7,%xmm4,%xmm6
- xorl %edi,%ecx
- andl 4(%esp),%edx
- movl %eax,16(%esp)
- vpaddd %xmm7,%xmm1,%xmm1
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrld $3,%xmm4,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpslld $14,%xmm4,%xmm5
- movl 20(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpxor %xmm6,%xmm7,%xmm4
- addl 12(%esp),%edx
- andl %eax,%ebx
- addl 48(%esp),%edx
- vpshufd $250,%xmm0,%xmm7
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 28(%esp),%edx
- vpsrld $11,%xmm6,%xmm6
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl %edx,28(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpslld $11,%xmm5,%xmm5
- andnl 4(%esp),%edx,%esi
- xorl %edi,%ecx
- andl (%esp),%edx
- vpxor %xmm6,%xmm4,%xmm4
- movl %ebx,12(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpsrld $10,%xmm7,%xmm6
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl 16(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpsrlq $17,%xmm7,%xmm5
- addl 8(%esp),%edx
- andl %ebx,%eax
- addl 52(%esp),%edx
- vpaddd %xmm4,%xmm1,%xmm1
- xorl %edi,%eax
- addl %edx,%ecx
- addl 24(%esp),%edx
- vpxor %xmm5,%xmm6,%xmm6
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpsrlq $19,%xmm7,%xmm7
- movl %edx,24(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- andnl (%esp),%edx,%esi
- xorl %edi,%ecx
- andl 28(%esp),%edx
- vpshufd $132,%xmm6,%xmm7
- movl %eax,8(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrldq $8,%xmm7,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpaddd %xmm7,%xmm1,%xmm1
- movl 12(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpshufd $80,%xmm1,%xmm7
- addl 4(%esp),%edx
- andl %eax,%ebx
- addl 56(%esp),%edx
- vpsrld $10,%xmm7,%xmm6
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 20(%esp),%edx
- vpsrlq $17,%xmm7,%xmm5
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm6,%xmm6
- movl %edx,20(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpsrlq $19,%xmm7,%xmm7
- andnl 28(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 24(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- movl %ebx,4(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpshufd $232,%xmm6,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpslldq $8,%xmm7,%xmm7
- movl 8(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpaddd %xmm7,%xmm1,%xmm1
- addl (%esp),%edx
- andl %ebx,%eax
- addl 60(%esp),%edx
- vpaddd 16(%ebp),%xmm1,%xmm6
- xorl %edi,%eax
- addl %edx,%ecx
- addl 16(%esp),%edx
- leal (%eax,%ecx,1),%eax
- vmovdqa %xmm6,48(%esp)
- vpalignr $4,%xmm2,%xmm3,%xmm4
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,16(%esp)
- vpalignr $4,%xmm0,%xmm1,%xmm7
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 24(%esp),%edx,%esi
- vpsrld $7,%xmm4,%xmm6
- xorl %edi,%ecx
- andl 20(%esp),%edx
- movl %eax,(%esp)
- vpaddd %xmm7,%xmm2,%xmm2
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrld $3,%xmm4,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpslld $14,%xmm4,%xmm5
- movl 4(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpxor %xmm6,%xmm7,%xmm4
- addl 28(%esp),%edx
- andl %eax,%ebx
- addl 64(%esp),%edx
- vpshufd $250,%xmm1,%xmm7
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 12(%esp),%edx
- vpsrld $11,%xmm6,%xmm6
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl %edx,12(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpslld $11,%xmm5,%xmm5
- andnl 20(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 16(%esp),%edx
- vpxor %xmm6,%xmm4,%xmm4
- movl %ebx,28(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpsrld $10,%xmm7,%xmm6
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl (%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpsrlq $17,%xmm7,%xmm5
- addl 24(%esp),%edx
- andl %ebx,%eax
- addl 68(%esp),%edx
- vpaddd %xmm4,%xmm2,%xmm2
- xorl %edi,%eax
- addl %edx,%ecx
- addl 8(%esp),%edx
- vpxor %xmm5,%xmm6,%xmm6
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpsrlq $19,%xmm7,%xmm7
- movl %edx,8(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- andnl 16(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 12(%esp),%edx
- vpshufd $132,%xmm6,%xmm7
- movl %eax,24(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrldq $8,%xmm7,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpaddd %xmm7,%xmm2,%xmm2
- movl 28(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpshufd $80,%xmm2,%xmm7
- addl 20(%esp),%edx
- andl %eax,%ebx
- addl 72(%esp),%edx
- vpsrld $10,%xmm7,%xmm6
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 4(%esp),%edx
- vpsrlq $17,%xmm7,%xmm5
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm6,%xmm6
- movl %edx,4(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpsrlq $19,%xmm7,%xmm7
- andnl 12(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 8(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- movl %ebx,20(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpshufd $232,%xmm6,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpslldq $8,%xmm7,%xmm7
- movl 24(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpaddd %xmm7,%xmm2,%xmm2
- addl 16(%esp),%edx
- andl %ebx,%eax
- addl 76(%esp),%edx
- vpaddd 32(%ebp),%xmm2,%xmm6
- xorl %edi,%eax
- addl %edx,%ecx
- addl (%esp),%edx
- leal (%eax,%ecx,1),%eax
- vmovdqa %xmm6,64(%esp)
- vpalignr $4,%xmm3,%xmm0,%xmm4
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,(%esp)
- vpalignr $4,%xmm1,%xmm2,%xmm7
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 8(%esp),%edx,%esi
- vpsrld $7,%xmm4,%xmm6
- xorl %edi,%ecx
- andl 4(%esp),%edx
- movl %eax,16(%esp)
- vpaddd %xmm7,%xmm3,%xmm3
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrld $3,%xmm4,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpslld $14,%xmm4,%xmm5
- movl 20(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpxor %xmm6,%xmm7,%xmm4
- addl 12(%esp),%edx
- andl %eax,%ebx
- addl 80(%esp),%edx
- vpshufd $250,%xmm2,%xmm7
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 28(%esp),%edx
- vpsrld $11,%xmm6,%xmm6
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl %edx,28(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpslld $11,%xmm5,%xmm5
- andnl 4(%esp),%edx,%esi
- xorl %edi,%ecx
- andl (%esp),%edx
- vpxor %xmm6,%xmm4,%xmm4
- movl %ebx,12(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpsrld $10,%xmm7,%xmm6
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpxor %xmm5,%xmm4,%xmm4
- movl 16(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpsrlq $17,%xmm7,%xmm5
- addl 8(%esp),%edx
- andl %ebx,%eax
- addl 84(%esp),%edx
- vpaddd %xmm4,%xmm3,%xmm3
- xorl %edi,%eax
- addl %edx,%ecx
- addl 24(%esp),%edx
- vpxor %xmm5,%xmm6,%xmm6
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpsrlq $19,%xmm7,%xmm7
- movl %edx,24(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpxor %xmm7,%xmm6,%xmm6
- andnl (%esp),%edx,%esi
- xorl %edi,%ecx
- andl 28(%esp),%edx
- vpshufd $132,%xmm6,%xmm7
- movl %eax,8(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- vpsrldq $8,%xmm7,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- vpaddd %xmm7,%xmm3,%xmm3
- movl 12(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- vpshufd $80,%xmm3,%xmm7
- addl 4(%esp),%edx
- andl %eax,%ebx
- addl 88(%esp),%edx
- vpsrld $10,%xmm7,%xmm6
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 20(%esp),%edx
- vpsrlq $17,%xmm7,%xmm5
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- vpxor %xmm5,%xmm6,%xmm6
- movl %edx,20(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- vpsrlq $19,%xmm7,%xmm7
- andnl 28(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 24(%esp),%edx
- vpxor %xmm7,%xmm6,%xmm6
- movl %ebx,4(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- vpshufd $232,%xmm6,%xmm7
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- vpslldq $8,%xmm7,%xmm7
- movl 8(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- vpaddd %xmm7,%xmm3,%xmm3
- addl (%esp),%edx
- andl %ebx,%eax
- addl 92(%esp),%edx
- vpaddd 48(%ebp),%xmm3,%xmm6
- xorl %edi,%eax
- addl %edx,%ecx
- addl 16(%esp),%edx
- leal (%eax,%ecx,1),%eax
- vmovdqa %xmm6,80(%esp)
- cmpl $66051,64(%ebp)
- jne .L018avx_bmi_00_47
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,16(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 24(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 20(%esp),%edx
- movl %eax,(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 4(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- andl %eax,%ebx
- addl 32(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 12(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,12(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 20(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 16(%esp),%edx
- movl %ebx,28(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl (%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- andl %ebx,%eax
- addl 36(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 8(%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,8(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 16(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 12(%esp),%edx
- movl %eax,24(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 28(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- andl %eax,%ebx
- addl 40(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 4(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,4(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 12(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 8(%esp),%edx
- movl %ebx,20(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 24(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- andl %ebx,%eax
- addl 44(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl (%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 8(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 4(%esp),%edx
- movl %eax,16(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 20(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- andl %eax,%ebx
- addl 48(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 28(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,28(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 4(%esp),%edx,%esi
- xorl %edi,%ecx
- andl (%esp),%edx
- movl %ebx,12(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 16(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- andl %ebx,%eax
- addl 52(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 24(%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,24(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl (%esp),%edx,%esi
- xorl %edi,%ecx
- andl 28(%esp),%edx
- movl %eax,8(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 12(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- andl %eax,%ebx
- addl 56(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 20(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,20(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 28(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 24(%esp),%edx
- movl %ebx,4(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 8(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- andl %ebx,%eax
- addl 60(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 16(%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,16(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 24(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 20(%esp),%edx
- movl %eax,(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 4(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 28(%esp),%edx
- andl %eax,%ebx
- addl 64(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 12(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,12(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 20(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 16(%esp),%edx
- movl %ebx,28(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl (%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 24(%esp),%edx
- andl %ebx,%eax
- addl 68(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 8(%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,8(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 16(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 12(%esp),%edx
- movl %eax,24(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 28(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 20(%esp),%edx
- andl %eax,%ebx
- addl 72(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 4(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,4(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 12(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 8(%esp),%edx
- movl %ebx,20(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 24(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 16(%esp),%edx
- andl %ebx,%eax
- addl 76(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl (%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 8(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 4(%esp),%edx
- movl %eax,16(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 20(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 12(%esp),%edx
- andl %eax,%ebx
- addl 80(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 28(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,28(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 4(%esp),%edx,%esi
- xorl %edi,%ecx
- andl (%esp),%edx
- movl %ebx,12(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 16(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl 8(%esp),%edx
- andl %ebx,%eax
- addl 84(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 24(%esp),%edx
- leal (%eax,%ecx,1),%eax
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,24(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl (%esp),%edx,%esi
- xorl %edi,%ecx
- andl 28(%esp),%edx
- movl %eax,8(%esp)
- orl %esi,%edx
- rorxl $2,%eax,%edi
- rorxl $13,%eax,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%eax,%ecx
- xorl %edi,%esi
- movl 12(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%eax
- addl 4(%esp),%edx
- andl %eax,%ebx
- addl 88(%esp),%edx
- xorl %edi,%ebx
- addl %edx,%ecx
- addl 20(%esp),%edx
- leal (%ebx,%ecx,1),%ebx
- rorxl $6,%edx,%ecx
- rorxl $11,%edx,%esi
- movl %edx,20(%esp)
- rorxl $25,%edx,%edi
- xorl %esi,%ecx
- andnl 28(%esp),%edx,%esi
- xorl %edi,%ecx
- andl 24(%esp),%edx
- movl %ebx,4(%esp)
- orl %esi,%edx
- rorxl $2,%ebx,%edi
- rorxl $13,%ebx,%esi
- leal (%edx,%ecx,1),%edx
- rorxl $22,%ebx,%ecx
- xorl %edi,%esi
- movl 8(%esp),%edi
- xorl %esi,%ecx
- xorl %edi,%ebx
- addl (%esp),%edx
- andl %ebx,%eax
- addl 92(%esp),%edx
- xorl %edi,%eax
- addl %edx,%ecx
- addl 16(%esp),%edx
- leal (%eax,%ecx,1),%eax
- movl 96(%esp),%esi
- xorl %edi,%ebx
- movl 12(%esp),%ecx
- addl (%esi),%eax
- addl 4(%esi),%ebx
- addl 8(%esi),%edi
- addl 12(%esi),%ecx
- movl %eax,(%esi)
- movl %ebx,4(%esi)
- movl %edi,8(%esi)
- movl %ecx,12(%esi)
- movl %ebx,4(%esp)
- xorl %edi,%ebx
- movl %edi,8(%esp)
- movl %ecx,12(%esp)
- movl 20(%esp),%edi
- movl 24(%esp),%ecx
- addl 16(%esi),%edx
- addl 20(%esi),%edi
- addl 24(%esi),%ecx
- movl %edx,16(%esi)
- movl %edi,20(%esi)
- movl %edi,20(%esp)
- movl 28(%esp),%edi
- movl %ecx,24(%esi)
- addl 28(%esi),%edi
- movl %ecx,24(%esp)
- movl %edi,28(%esi)
- movl %edi,28(%esp)
- movl 100(%esp),%edi
- vmovdqa 64(%ebp),%xmm7
- subl $192,%ebp
- cmpl 104(%esp),%edi
- jb .L017grand_avx_bmi
+ jb .L011grand_ssse3
movl 108(%esp),%esp
- vzeroall
popl %edi
popl %esi
popl %ebx
diff --git a/secure/lib/libcrypto/man/man3/ADMISSIONS.3 b/secure/lib/libcrypto/man/man3/ADMISSIONS.3
index 9bee22d7dbae1..2ab4504c7eb65 100644
--- a/secure/lib/libcrypto/man/man3/ADMISSIONS.3
+++ b/secure/lib/libcrypto/man/man3/ADMISSIONS.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ADMISSIONS 3"
-.TH ADMISSIONS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ADMISSIONS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ASN1_INTEGER_get_int64.3 b/secure/lib/libcrypto/man/man3/ASN1_INTEGER_get_int64.3
index e500432411501..39a51d834b96d 100644
--- a/secure/lib/libcrypto/man/man3/ASN1_INTEGER_get_int64.3
+++ b/secure/lib/libcrypto/man/man3/ASN1_INTEGER_get_int64.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASN1_INTEGER_GET_INT64 3"
-.TH ASN1_INTEGER_GET_INT64 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASN1_INTEGER_GET_INT64 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -157,10 +157,10 @@ ASN1_INTEGER_get_uint64, ASN1_INTEGER_set_uint64, ASN1_INTEGER_get_int64, ASN1_I
\& ASN1_INTEGER *BN_to_ASN1_INTEGER(const BIGNUM *bn, ASN1_INTEGER *ai);
\& BIGNUM *ASN1_INTEGER_to_BN(const ASN1_INTEGER *ai, BIGNUM *bn);
\&
-\& int ASN1_ENUMERATED_get_int64(int64_t *pr, const ASN1_INTEGER *a);
+\& int ASN1_ENUMERATED_get_int64(int64_t *pr, const ASN1_ENUMERATED *a);
\& long ASN1_ENUMERATED_get(const ASN1_ENUMERATED *a);
\&
-\& int ASN1_ENUMERATED_set_int64(ASN1_INTEGER *a, int64_t r);
+\& int ASN1_ENUMERATED_set_int64(ASN1_ENUMERATED *a, int64_t r);
\& int ASN1_ENUMERATED_set(ASN1_ENUMERATED *a, long v);
\&
\& ASN1_ENUMERATED *BN_to_ASN1_ENUMERATED(BIGNUM *bn, ASN1_ENUMERATED *ai);
diff --git a/secure/lib/libcrypto/man/man3/ASN1_ITEM_lookup.3 b/secure/lib/libcrypto/man/man3/ASN1_ITEM_lookup.3
index 6032080cc5e60..f50cddfc104c0 100644
--- a/secure/lib/libcrypto/man/man3/ASN1_ITEM_lookup.3
+++ b/secure/lib/libcrypto/man/man3/ASN1_ITEM_lookup.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASN1_ITEM_LOOKUP 3"
-.TH ASN1_ITEM_LOOKUP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASN1_ITEM_LOOKUP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ASN1_OBJECT_new.3 b/secure/lib/libcrypto/man/man3/ASN1_OBJECT_new.3
index ee517b0ace522..655340911d730 100644
--- a/secure/lib/libcrypto/man/man3/ASN1_OBJECT_new.3
+++ b/secure/lib/libcrypto/man/man3/ASN1_OBJECT_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASN1_OBJECT_NEW 3"
-.TH ASN1_OBJECT_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASN1_OBJECT_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ASN1_STRING_TABLE_add.3 b/secure/lib/libcrypto/man/man3/ASN1_STRING_TABLE_add.3
index fa1a7bc2a4b24..5dd8ef1c18e9c 100644
--- a/secure/lib/libcrypto/man/man3/ASN1_STRING_TABLE_add.3
+++ b/secure/lib/libcrypto/man/man3/ASN1_STRING_TABLE_add.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASN1_STRING_TABLE_ADD 3"
-.TH ASN1_STRING_TABLE_ADD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASN1_STRING_TABLE_ADD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ASN1_STRING_length.3 b/secure/lib/libcrypto/man/man3/ASN1_STRING_length.3
index 84f0edf194f4f..456951f37d18e 100644
--- a/secure/lib/libcrypto/man/man3/ASN1_STRING_length.3
+++ b/secure/lib/libcrypto/man/man3/ASN1_STRING_length.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASN1_STRING_LENGTH 3"
-.TH ASN1_STRING_LENGTH 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASN1_STRING_LENGTH 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ASN1_STRING_new.3 b/secure/lib/libcrypto/man/man3/ASN1_STRING_new.3
index ce20d45b163c8..4d59324d6a7bb 100644
--- a/secure/lib/libcrypto/man/man3/ASN1_STRING_new.3
+++ b/secure/lib/libcrypto/man/man3/ASN1_STRING_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASN1_STRING_NEW 3"
-.TH ASN1_STRING_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASN1_STRING_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ASN1_STRING_print_ex.3 b/secure/lib/libcrypto/man/man3/ASN1_STRING_print_ex.3
index 96d4ab74a23bd..1c6b48ec7869e 100644
--- a/secure/lib/libcrypto/man/man3/ASN1_STRING_print_ex.3
+++ b/secure/lib/libcrypto/man/man3/ASN1_STRING_print_ex.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASN1_STRING_PRINT_EX 3"
-.TH ASN1_STRING_PRINT_EX 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASN1_STRING_PRINT_EX 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ASN1_TIME_set.3 b/secure/lib/libcrypto/man/man3/ASN1_TIME_set.3
index c4089fa027ae7..efc8c7da59e39 100644
--- a/secure/lib/libcrypto/man/man3/ASN1_TIME_set.3
+++ b/secure/lib/libcrypto/man/man3/ASN1_TIME_set.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASN1_TIME_SET 3"
-.TH ASN1_TIME_SET 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASN1_TIME_SET 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ASN1_TYPE_get.3 b/secure/lib/libcrypto/man/man3/ASN1_TYPE_get.3
index bdaa21551c1ca..c9c720609c639 100644
--- a/secure/lib/libcrypto/man/man3/ASN1_TYPE_get.3
+++ b/secure/lib/libcrypto/man/man3/ASN1_TYPE_get.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASN1_TYPE_GET 3"
-.TH ASN1_TYPE_GET 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASN1_TYPE_GET 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -204,7 +204,7 @@ If either or both of the parameters passed to \fBASN1_TYPE_cmp()\fR is \s-1NULL\
return value is non-zero. Technically if both parameters are \s-1NULL\s0 the two
types could be absent \s-1OPTIONAL\s0 fields and so should match, however passing
\&\s-1NULL\s0 values could also indicate a programming error (for example an
-unparseable type which returns \s-1NULL\s0) for types which do \fBnot\fR match. So
+unparsable type which returns \s-1NULL\s0) for types which do \fBnot\fR match. So
applications should handle the case of two absent values separately.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
@@ -223,7 +223,7 @@ applications should handle the case of two absent values separately.
\&\s-1NULL\s0 on failure.
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2015\-2016 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2015\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/ASN1_generate_nconf.3 b/secure/lib/libcrypto/man/man3/ASN1_generate_nconf.3
index 835adc7b3be11..086d0ebe19c20 100644
--- a/secure/lib/libcrypto/man/man3/ASN1_generate_nconf.3
+++ b/secure/lib/libcrypto/man/man3/ASN1_generate_nconf.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASN1_GENERATE_NCONF 3"
-.TH ASN1_GENERATE_NCONF 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASN1_GENERATE_NCONF 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ASYNC_WAIT_CTX_new.3 b/secure/lib/libcrypto/man/man3/ASYNC_WAIT_CTX_new.3
index 3e45ef782019d..14af8f68721f2 100644
--- a/secure/lib/libcrypto/man/man3/ASYNC_WAIT_CTX_new.3
+++ b/secure/lib/libcrypto/man/man3/ASYNC_WAIT_CTX_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASYNC_WAIT_CTX_NEW 3"
-.TH ASYNC_WAIT_CTX_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASYNC_WAIT_CTX_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ASYNC_start_job.3 b/secure/lib/libcrypto/man/man3/ASYNC_start_job.3
index c55195c6ea005..b6ae72a776823 100644
--- a/secure/lib/libcrypto/man/man3/ASYNC_start_job.3
+++ b/secure/lib/libcrypto/man/man3/ASYNC_start_job.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ASYNC_START_JOB 3"
-.TH ASYNC_START_JOB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ASYNC_START_JOB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BF_encrypt.3 b/secure/lib/libcrypto/man/man3/BF_encrypt.3
index 6b3e3291d2f33..6358aca19ee25 100644
--- a/secure/lib/libcrypto/man/man3/BF_encrypt.3
+++ b/secure/lib/libcrypto/man/man3/BF_encrypt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BF_ENCRYPT 3"
-.TH BF_ENCRYPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BF_ENCRYPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_ADDR.3 b/secure/lib/libcrypto/man/man3/BIO_ADDR.3
index 3cbe4dd42cd9b..27ebbbb090500 100644
--- a/secure/lib/libcrypto/man/man3/BIO_ADDR.3
+++ b/secure/lib/libcrypto/man/man3/BIO_ADDR.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_ADDR 3"
-.TH BIO_ADDR 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_ADDR 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_ADDRINFO.3 b/secure/lib/libcrypto/man/man3/BIO_ADDRINFO.3
index c514538980712..6cea726f5a052 100644
--- a/secure/lib/libcrypto/man/man3/BIO_ADDRINFO.3
+++ b/secure/lib/libcrypto/man/man3/BIO_ADDRINFO.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_ADDRINFO 3"
-.TH BIO_ADDRINFO 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_ADDRINFO 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_connect.3 b/secure/lib/libcrypto/man/man3/BIO_connect.3
index b1664b43884d5..8b23cac0eaf10 100644
--- a/secure/lib/libcrypto/man/man3/BIO_connect.3
+++ b/secure/lib/libcrypto/man/man3/BIO_connect.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_CONNECT 3"
-.TH BIO_CONNECT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_CONNECT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_ctrl.3 b/secure/lib/libcrypto/man/man3/BIO_ctrl.3
index 287f73cefc4db..834902d5f771e 100644
--- a/secure/lib/libcrypto/man/man3/BIO_ctrl.3
+++ b/secure/lib/libcrypto/man/man3/BIO_ctrl.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_CTRL 3"
-.TH BIO_CTRL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_CTRL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_f_base64.3 b/secure/lib/libcrypto/man/man3/BIO_f_base64.3
index bed5432e7681a..152e6a8874dce 100644
--- a/secure/lib/libcrypto/man/man3/BIO_f_base64.3
+++ b/secure/lib/libcrypto/man/man3/BIO_f_base64.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_F_BASE64 3"
-.TH BIO_F_BASE64 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_F_BASE64 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_f_buffer.3 b/secure/lib/libcrypto/man/man3/BIO_f_buffer.3
index cc79eb36244fd..0b014cc12c939 100644
--- a/secure/lib/libcrypto/man/man3/BIO_f_buffer.3
+++ b/secure/lib/libcrypto/man/man3/BIO_f_buffer.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_F_BUFFER 3"
-.TH BIO_F_BUFFER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_F_BUFFER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -179,10 +179,20 @@ is expanded.
.IX Header "NOTES"
These functions, other than \fBBIO_f_buffer()\fR, are implemented as macros.
.PP
-Buffering BIOs implement \fBBIO_gets()\fR by using \fBBIO_read_ex()\fR operations on the
-next \s-1BIO\s0 in the chain. By prepending a buffering \s-1BIO\s0 to a chain it is therefore
-possible to provide \fBBIO_gets()\fR functionality if the following BIOs do not
-support it (for example \s-1SSL\s0 BIOs).
+Buffering BIOs implement \fBBIO_read_ex()\fR and \fBBIO_gets()\fR by using
+\&\fBBIO_read_ex()\fR operations on the next \s-1BIO\s0 in the chain and storing the
+result in an internal buffer, from which bytes are given back to the
+caller as appropriate for the call; a \fBBIO_gets()\fR is guaranteed to give
+the caller a whole line, and \fBBIO_read_ex()\fR is guaranteed to give the
+caller the number of bytes it asks for, unless there's an error or end
+of communication is reached in the next \s-1BIO.\s0 By prepending a
+buffering \s-1BIO\s0 to a chain it is therefore possible to provide
+\&\fBBIO_gets()\fR or exact size \fBBIO_read_ex()\fR functionality if the following
+BIOs do not support it.
+.PP
+Do not add more than one \fBBIO_f_buffer()\fR to a \s-1BIO\s0 chain. The result of
+doing so will force a full read of the size of the internal buffer of
+the top \fBBIO_f_buffer()\fR, which is 4 KiB at a minimum.
.PP
Data is only written to the next \s-1BIO\s0 in the chain when the write buffer fills
or when \fBBIO_flush()\fR is called. It is therefore important to call \fBBIO_flush()\fR
@@ -209,7 +219,7 @@ there was an error.
\&\fBBIO_ctrl\fR\|(3).
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2000\-2016 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2000\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/BIO_f_cipher.3 b/secure/lib/libcrypto/man/man3/BIO_f_cipher.3
index f7831722b60e1..8b7ce8c75d36b 100644
--- a/secure/lib/libcrypto/man/man3/BIO_f_cipher.3
+++ b/secure/lib/libcrypto/man/man3/BIO_f_cipher.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_F_CIPHER 3"
-.TH BIO_F_CIPHER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_F_CIPHER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_f_md.3 b/secure/lib/libcrypto/man/man3/BIO_f_md.3
index 0a8eeed7a5d3c..1db013f11cebd 100644
--- a/secure/lib/libcrypto/man/man3/BIO_f_md.3
+++ b/secure/lib/libcrypto/man/man3/BIO_f_md.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_F_MD 3"
-.TH BIO_F_MD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_F_MD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_f_null.3 b/secure/lib/libcrypto/man/man3/BIO_f_null.3
index d6d168dad2040..3fefc47f65e05 100644
--- a/secure/lib/libcrypto/man/man3/BIO_f_null.3
+++ b/secure/lib/libcrypto/man/man3/BIO_f_null.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_F_NULL 3"
-.TH BIO_F_NULL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_F_NULL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_f_ssl.3 b/secure/lib/libcrypto/man/man3/BIO_f_ssl.3
index e131f3a1dd1f8..d3b10db6954af 100644
--- a/secure/lib/libcrypto/man/man3/BIO_f_ssl.3
+++ b/secure/lib/libcrypto/man/man3/BIO_f_ssl.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_F_SSL 3"
-.TH BIO_F_SSL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_F_SSL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_find_type.3 b/secure/lib/libcrypto/man/man3/BIO_find_type.3
index 534534d17c2ec..0d49fd3672e84 100644
--- a/secure/lib/libcrypto/man/man3/BIO_find_type.3
+++ b/secure/lib/libcrypto/man/man3/BIO_find_type.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_FIND_TYPE 3"
-.TH BIO_FIND_TYPE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_FIND_TYPE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_get_data.3 b/secure/lib/libcrypto/man/man3/BIO_get_data.3
index bc46e7b221552..f24013d850fba 100644
--- a/secure/lib/libcrypto/man/man3/BIO_get_data.3
+++ b/secure/lib/libcrypto/man/man3/BIO_get_data.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_GET_DATA 3"
-.TH BIO_GET_DATA 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_GET_DATA 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_get_ex_new_index.3 b/secure/lib/libcrypto/man/man3/BIO_get_ex_new_index.3
index 2b0092d60bfd6..e0b01d64b8f15 100644
--- a/secure/lib/libcrypto/man/man3/BIO_get_ex_new_index.3
+++ b/secure/lib/libcrypto/man/man3/BIO_get_ex_new_index.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_GET_EX_NEW_INDEX 3"
-.TH BIO_GET_EX_NEW_INDEX 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_GET_EX_NEW_INDEX 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -163,7 +163,7 @@ for any of the OpenSSL datatypes listed in
These functions handle application-specific data for OpenSSL data
structures.
.PP
-\&\fBTYPE_get_new_ex_index()\fR is a macro that calls \fBCRYPTO_get_ex_new_index()\fR
+\&\fBTYPE_get_ex_new_index()\fR is a macro that calls \fBCRYPTO_get_ex_new_index()\fR
with the correct \fBindex\fR value.
.PP
\&\fBTYPE_set_ex_data()\fR is a function that calls \fBCRYPTO_set_ex_data()\fR with
@@ -173,7 +173,7 @@ an offset into the opaque exdata part of the \s-1TYPE\s0 object.
an offset into the opaque exdata part of the \s-1TYPE\s0 object.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
-\&\fBTYPE_get_new_ex_index()\fR returns a new index on success or \-1 on error.
+\&\fBTYPE_get_ex_new_index()\fR returns a new index on success or \-1 on error.
.PP
\&\fBTYPE_set_ex_data()\fR returns 1 on success or 0 on error.
.PP
@@ -183,7 +183,7 @@ an offset into the opaque exdata part of the \s-1TYPE\s0 object.
\&\fBCRYPTO_get_ex_new_index\fR\|(3).
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2015\-2018 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2015\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/BIO_meth_new.3 b/secure/lib/libcrypto/man/man3/BIO_meth_new.3
index 306a62eb8eb09..c0612608da1c7 100644
--- a/secure/lib/libcrypto/man/man3/BIO_meth_new.3
+++ b/secure/lib/libcrypto/man/man3/BIO_meth_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_METH_NEW 3"
-.TH BIO_METH_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_METH_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_new.3 b/secure/lib/libcrypto/man/man3/BIO_new.3
index 6cb997f31e102..178796650c09d 100644
--- a/secure/lib/libcrypto/man/man3/BIO_new.3
+++ b/secure/lib/libcrypto/man/man3/BIO_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_NEW 3"
-.TH BIO_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_new_CMS.3 b/secure/lib/libcrypto/man/man3/BIO_new_CMS.3
index e59a0a3b75810..91f37601fe629 100644
--- a/secure/lib/libcrypto/man/man3/BIO_new_CMS.3
+++ b/secure/lib/libcrypto/man/man3/BIO_new_CMS.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_NEW_CMS 3"
-.TH BIO_NEW_CMS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_NEW_CMS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_parse_hostserv.3 b/secure/lib/libcrypto/man/man3/BIO_parse_hostserv.3
index 747fbc19cc6c1..23bac0c66285f 100644
--- a/secure/lib/libcrypto/man/man3/BIO_parse_hostserv.3
+++ b/secure/lib/libcrypto/man/man3/BIO_parse_hostserv.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_PARSE_HOSTSERV 3"
-.TH BIO_PARSE_HOSTSERV 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_PARSE_HOSTSERV 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_printf.3 b/secure/lib/libcrypto/man/man3/BIO_printf.3
index d68262148fff9..6f391abd12f1e 100644
--- a/secure/lib/libcrypto/man/man3/BIO_printf.3
+++ b/secure/lib/libcrypto/man/man3/BIO_printf.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_PRINTF 3"
-.TH BIO_PRINTF 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_PRINTF 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_push.3 b/secure/lib/libcrypto/man/man3/BIO_push.3
index 56cf8c42036b1..38cb3098b9458 100644
--- a/secure/lib/libcrypto/man/man3/BIO_push.3
+++ b/secure/lib/libcrypto/man/man3/BIO_push.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_PUSH 3"
-.TH BIO_PUSH 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_PUSH 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_read.3 b/secure/lib/libcrypto/man/man3/BIO_read.3
index e138ff703a2c1..fbb291f8e095e 100644
--- a/secure/lib/libcrypto/man/man3/BIO_read.3
+++ b/secure/lib/libcrypto/man/man3/BIO_read.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_READ 3"
-.TH BIO_READ 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_READ 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_s_accept.3 b/secure/lib/libcrypto/man/man3/BIO_s_accept.3
index 8f4e156d897e9..3b92822346337 100644
--- a/secure/lib/libcrypto/man/man3/BIO_s_accept.3
+++ b/secure/lib/libcrypto/man/man3/BIO_s_accept.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_S_ACCEPT 3"
-.TH BIO_S_ACCEPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_S_ACCEPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_s_bio.3 b/secure/lib/libcrypto/man/man3/BIO_s_bio.3
index 43f2206bfce23..ad0c065498c31 100644
--- a/secure/lib/libcrypto/man/man3/BIO_s_bio.3
+++ b/secure/lib/libcrypto/man/man3/BIO_s_bio.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_S_BIO 3"
-.TH BIO_S_BIO 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_S_BIO 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_s_connect.3 b/secure/lib/libcrypto/man/man3/BIO_s_connect.3
index c74545f610b0c..450cb6ba7fef0 100644
--- a/secure/lib/libcrypto/man/man3/BIO_s_connect.3
+++ b/secure/lib/libcrypto/man/man3/BIO_s_connect.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_S_CONNECT 3"
-.TH BIO_S_CONNECT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_S_CONNECT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_s_fd.3 b/secure/lib/libcrypto/man/man3/BIO_s_fd.3
index 924f962a5a2b5..241ac633cf280 100644
--- a/secure/lib/libcrypto/man/man3/BIO_s_fd.3
+++ b/secure/lib/libcrypto/man/man3/BIO_s_fd.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_S_FD 3"
-.TH BIO_S_FD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_S_FD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_s_file.3 b/secure/lib/libcrypto/man/man3/BIO_s_file.3
index 1f301d5c61388..28bdb535c7f63 100644
--- a/secure/lib/libcrypto/man/man3/BIO_s_file.3
+++ b/secure/lib/libcrypto/man/man3/BIO_s_file.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_S_FILE 3"
-.TH BIO_S_FILE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_S_FILE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_s_mem.3 b/secure/lib/libcrypto/man/man3/BIO_s_mem.3
index 5e80706205506..6744dc8c0ebd0 100644
--- a/secure/lib/libcrypto/man/man3/BIO_s_mem.3
+++ b/secure/lib/libcrypto/man/man3/BIO_s_mem.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_S_MEM 3"
-.TH BIO_S_MEM 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_S_MEM 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_s_null.3 b/secure/lib/libcrypto/man/man3/BIO_s_null.3
index d122e432ff84c..b3e70b090cb86 100644
--- a/secure/lib/libcrypto/man/man3/BIO_s_null.3
+++ b/secure/lib/libcrypto/man/man3/BIO_s_null.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_S_NULL 3"
-.TH BIO_S_NULL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_S_NULL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_s_socket.3 b/secure/lib/libcrypto/man/man3/BIO_s_socket.3
index 8e66ed7944042..e0cec93715e4f 100644
--- a/secure/lib/libcrypto/man/man3/BIO_s_socket.3
+++ b/secure/lib/libcrypto/man/man3/BIO_s_socket.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_S_SOCKET 3"
-.TH BIO_S_SOCKET 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_S_SOCKET 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_set_callback.3 b/secure/lib/libcrypto/man/man3/BIO_set_callback.3
index 844e35ac49308..3a7167238e031 100644
--- a/secure/lib/libcrypto/man/man3/BIO_set_callback.3
+++ b/secure/lib/libcrypto/man/man3/BIO_set_callback.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_SET_CALLBACK 3"
-.TH BIO_SET_CALLBACK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_SET_CALLBACK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BIO_should_retry.3 b/secure/lib/libcrypto/man/man3/BIO_should_retry.3
index c2e0991d6d874..193d94084960b 100644
--- a/secure/lib/libcrypto/man/man3/BIO_should_retry.3
+++ b/secure/lib/libcrypto/man/man3/BIO_should_retry.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO_SHOULD_RETRY 3"
-.TH BIO_SHOULD_RETRY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO_SHOULD_RETRY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_BLINDING_new.3 b/secure/lib/libcrypto/man/man3/BN_BLINDING_new.3
index 958fa5df3da42..c41074bf5e70e 100644
--- a/secure/lib/libcrypto/man/man3/BN_BLINDING_new.3
+++ b/secure/lib/libcrypto/man/man3/BN_BLINDING_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_BLINDING_NEW 3"
-.TH BN_BLINDING_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_BLINDING_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_CTX_new.3 b/secure/lib/libcrypto/man/man3/BN_CTX_new.3
index 374136f5f513a..fdb53654ecca6 100644
--- a/secure/lib/libcrypto/man/man3/BN_CTX_new.3
+++ b/secure/lib/libcrypto/man/man3/BN_CTX_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_CTX_NEW 3"
-.TH BN_CTX_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_CTX_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_CTX_start.3 b/secure/lib/libcrypto/man/man3/BN_CTX_start.3
index e4fb021d3e241..fda22ff305b1c 100644
--- a/secure/lib/libcrypto/man/man3/BN_CTX_start.3
+++ b/secure/lib/libcrypto/man/man3/BN_CTX_start.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_CTX_START 3"
-.TH BN_CTX_START 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_CTX_START 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_add.3 b/secure/lib/libcrypto/man/man3/BN_add.3
index 9683f020b1b15..df5e03230556d 100644
--- a/secure/lib/libcrypto/man/man3/BN_add.3
+++ b/secure/lib/libcrypto/man/man3/BN_add.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_ADD 3"
-.TH BN_ADD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_ADD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_add_word.3 b/secure/lib/libcrypto/man/man3/BN_add_word.3
index e55cafb88322f..6e41c0e4c9628 100644
--- a/secure/lib/libcrypto/man/man3/BN_add_word.3
+++ b/secure/lib/libcrypto/man/man3/BN_add_word.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_ADD_WORD 3"
-.TH BN_ADD_WORD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_ADD_WORD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_bn2bin.3 b/secure/lib/libcrypto/man/man3/BN_bn2bin.3
index d7d6f6b0502ee..069afc6a2cc1a 100644
--- a/secure/lib/libcrypto/man/man3/BN_bn2bin.3
+++ b/secure/lib/libcrypto/man/man3/BN_bn2bin.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_BN2BIN 3"
-.TH BN_BN2BIN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_BN2BIN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_cmp.3 b/secure/lib/libcrypto/man/man3/BN_cmp.3
index ee74b8a15d7c7..730fedc1abdeb 100644
--- a/secure/lib/libcrypto/man/man3/BN_cmp.3
+++ b/secure/lib/libcrypto/man/man3/BN_cmp.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_CMP 3"
-.TH BN_CMP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_CMP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_copy.3 b/secure/lib/libcrypto/man/man3/BN_copy.3
index ec49b42ff7714..07f441c01998d 100644
--- a/secure/lib/libcrypto/man/man3/BN_copy.3
+++ b/secure/lib/libcrypto/man/man3/BN_copy.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_COPY 3"
-.TH BN_COPY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_COPY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_generate_prime.3 b/secure/lib/libcrypto/man/man3/BN_generate_prime.3
index 8ac4b3f5d0b39..20b12c6ae77ad 100644
--- a/secure/lib/libcrypto/man/man3/BN_generate_prime.3
+++ b/secure/lib/libcrypto/man/man3/BN_generate_prime.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_GENERATE_PRIME 3"
-.TH BN_GENERATE_PRIME 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_GENERATE_PRIME 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_mod_inverse.3 b/secure/lib/libcrypto/man/man3/BN_mod_inverse.3
index 13e5b9628ca46..4e0e11f68127a 100644
--- a/secure/lib/libcrypto/man/man3/BN_mod_inverse.3
+++ b/secure/lib/libcrypto/man/man3/BN_mod_inverse.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_MOD_INVERSE 3"
-.TH BN_MOD_INVERSE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_MOD_INVERSE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_mod_mul_montgomery.3 b/secure/lib/libcrypto/man/man3/BN_mod_mul_montgomery.3
index c6047fb4bb221..0f79aa1e1d13c 100644
--- a/secure/lib/libcrypto/man/man3/BN_mod_mul_montgomery.3
+++ b/secure/lib/libcrypto/man/man3/BN_mod_mul_montgomery.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_MOD_MUL_MONTGOMERY 3"
-.TH BN_MOD_MUL_MONTGOMERY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_MOD_MUL_MONTGOMERY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_mod_mul_reciprocal.3 b/secure/lib/libcrypto/man/man3/BN_mod_mul_reciprocal.3
index 5356ad44fb218..1bd1b117d4478 100644
--- a/secure/lib/libcrypto/man/man3/BN_mod_mul_reciprocal.3
+++ b/secure/lib/libcrypto/man/man3/BN_mod_mul_reciprocal.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_MOD_MUL_RECIPROCAL 3"
-.TH BN_MOD_MUL_RECIPROCAL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_MOD_MUL_RECIPROCAL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_new.3 b/secure/lib/libcrypto/man/man3/BN_new.3
index 73f6401160062..d9d6d2b089929 100644
--- a/secure/lib/libcrypto/man/man3/BN_new.3
+++ b/secure/lib/libcrypto/man/man3/BN_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_NEW 3"
-.TH BN_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_num_bytes.3 b/secure/lib/libcrypto/man/man3/BN_num_bytes.3
index 9973d0e9d129c..c827a95db31b3 100644
--- a/secure/lib/libcrypto/man/man3/BN_num_bytes.3
+++ b/secure/lib/libcrypto/man/man3/BN_num_bytes.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_NUM_BYTES 3"
-.TH BN_NUM_BYTES 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_NUM_BYTES 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_rand.3 b/secure/lib/libcrypto/man/man3/BN_rand.3
index 1574ada4fd00a..f993d3e755053 100644
--- a/secure/lib/libcrypto/man/man3/BN_rand.3
+++ b/secure/lib/libcrypto/man/man3/BN_rand.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_RAND 3"
-.TH BN_RAND 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_RAND 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_security_bits.3 b/secure/lib/libcrypto/man/man3/BN_security_bits.3
index dbf4672d92f32..5adbb8cf3cff7 100644
--- a/secure/lib/libcrypto/man/man3/BN_security_bits.3
+++ b/secure/lib/libcrypto/man/man3/BN_security_bits.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_SECURITY_BITS 3"
-.TH BN_SECURITY_BITS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_SECURITY_BITS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_set_bit.3 b/secure/lib/libcrypto/man/man3/BN_set_bit.3
index af8baa21ad98d..0e71898d2d0af 100644
--- a/secure/lib/libcrypto/man/man3/BN_set_bit.3
+++ b/secure/lib/libcrypto/man/man3/BN_set_bit.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_SET_BIT 3"
-.TH BN_SET_BIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_SET_BIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_swap.3 b/secure/lib/libcrypto/man/man3/BN_swap.3
index f3021437d408a..06ef18f6dedc6 100644
--- a/secure/lib/libcrypto/man/man3/BN_swap.3
+++ b/secure/lib/libcrypto/man/man3/BN_swap.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_SWAP 3"
-.TH BN_SWAP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_SWAP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BN_zero.3 b/secure/lib/libcrypto/man/man3/BN_zero.3
index 12e39b7dae57c..8e86a5a767ccf 100644
--- a/secure/lib/libcrypto/man/man3/BN_zero.3
+++ b/secure/lib/libcrypto/man/man3/BN_zero.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BN_ZERO 3"
-.TH BN_ZERO 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BN_ZERO 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/BUF_MEM_new.3 b/secure/lib/libcrypto/man/man3/BUF_MEM_new.3
index 0c28684ed9330..0d73cacc9e51e 100644
--- a/secure/lib/libcrypto/man/man3/BUF_MEM_new.3
+++ b/secure/lib/libcrypto/man/man3/BUF_MEM_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BUF_MEM_NEW 3"
-.TH BUF_MEM_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BUF_MEM_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_add0_cert.3 b/secure/lib/libcrypto/man/man3/CMS_add0_cert.3
index 26194ada00bce..24789ab679d39 100644
--- a/secure/lib/libcrypto/man/man3/CMS_add0_cert.3
+++ b/secure/lib/libcrypto/man/man3/CMS_add0_cert.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_ADD0_CERT 3"
-.TH CMS_ADD0_CERT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_ADD0_CERT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_add1_recipient_cert.3 b/secure/lib/libcrypto/man/man3/CMS_add1_recipient_cert.3
index 6b75788e97c98..d7a6d35256ac4 100644
--- a/secure/lib/libcrypto/man/man3/CMS_add1_recipient_cert.3
+++ b/secure/lib/libcrypto/man/man3/CMS_add1_recipient_cert.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_ADD1_RECIPIENT_CERT 3"
-.TH CMS_ADD1_RECIPIENT_CERT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_ADD1_RECIPIENT_CERT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_add1_signer.3 b/secure/lib/libcrypto/man/man3/CMS_add1_signer.3
index 0d046f4b02f92..3f0fd6a6b8614 100644
--- a/secure/lib/libcrypto/man/man3/CMS_add1_signer.3
+++ b/secure/lib/libcrypto/man/man3/CMS_add1_signer.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_ADD1_SIGNER 3"
-.TH CMS_ADD1_SIGNER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_ADD1_SIGNER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_compress.3 b/secure/lib/libcrypto/man/man3/CMS_compress.3
index 916df4b6e643c..1c6d5857ad08a 100644
--- a/secure/lib/libcrypto/man/man3/CMS_compress.3
+++ b/secure/lib/libcrypto/man/man3/CMS_compress.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_COMPRESS 3"
-.TH CMS_COMPRESS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_COMPRESS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_decrypt.3 b/secure/lib/libcrypto/man/man3/CMS_decrypt.3
index ee4d4cf4c6460..a66fb7bbffd68 100644
--- a/secure/lib/libcrypto/man/man3/CMS_decrypt.3
+++ b/secure/lib/libcrypto/man/man3/CMS_decrypt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_DECRYPT 3"
-.TH CMS_DECRYPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_DECRYPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_encrypt.3 b/secure/lib/libcrypto/man/man3/CMS_encrypt.3
index cc5034b10dd3f..38c778f21d4cc 100644
--- a/secure/lib/libcrypto/man/man3/CMS_encrypt.3
+++ b/secure/lib/libcrypto/man/man3/CMS_encrypt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_ENCRYPT 3"
-.TH CMS_ENCRYPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_ENCRYPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_final.3 b/secure/lib/libcrypto/man/man3/CMS_final.3
index dd660c28d62a3..f97ba3b5e47f4 100644
--- a/secure/lib/libcrypto/man/man3/CMS_final.3
+++ b/secure/lib/libcrypto/man/man3/CMS_final.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_FINAL 3"
-.TH CMS_FINAL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_FINAL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_get0_RecipientInfos.3 b/secure/lib/libcrypto/man/man3/CMS_get0_RecipientInfos.3
index dba1f218c199f..a59ca5019eea4 100644
--- a/secure/lib/libcrypto/man/man3/CMS_get0_RecipientInfos.3
+++ b/secure/lib/libcrypto/man/man3/CMS_get0_RecipientInfos.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_GET0_RECIPIENTINFOS 3"
-.TH CMS_GET0_RECIPIENTINFOS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_GET0_RECIPIENTINFOS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_get0_SignerInfos.3 b/secure/lib/libcrypto/man/man3/CMS_get0_SignerInfos.3
index 22d4b12d07859..8f27e3ac73a9d 100644
--- a/secure/lib/libcrypto/man/man3/CMS_get0_SignerInfos.3
+++ b/secure/lib/libcrypto/man/man3/CMS_get0_SignerInfos.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_GET0_SIGNERINFOS 3"
-.TH CMS_GET0_SIGNERINFOS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_GET0_SIGNERINFOS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_get0_type.3 b/secure/lib/libcrypto/man/man3/CMS_get0_type.3
index 1d5a063bac5b6..ef5d4e02b5465 100644
--- a/secure/lib/libcrypto/man/man3/CMS_get0_type.3
+++ b/secure/lib/libcrypto/man/man3/CMS_get0_type.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_GET0_TYPE 3"
-.TH CMS_GET0_TYPE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_GET0_TYPE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_get1_ReceiptRequest.3 b/secure/lib/libcrypto/man/man3/CMS_get1_ReceiptRequest.3
index f28fb192ae872..1e574e6482407 100644
--- a/secure/lib/libcrypto/man/man3/CMS_get1_ReceiptRequest.3
+++ b/secure/lib/libcrypto/man/man3/CMS_get1_ReceiptRequest.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_GET1_RECEIPTREQUEST 3"
-.TH CMS_GET1_RECEIPTREQUEST 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_GET1_RECEIPTREQUEST 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_sign.3 b/secure/lib/libcrypto/man/man3/CMS_sign.3
index b633d6b2e8288..deec251cd2f39 100644
--- a/secure/lib/libcrypto/man/man3/CMS_sign.3
+++ b/secure/lib/libcrypto/man/man3/CMS_sign.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_SIGN 3"
-.TH CMS_SIGN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_SIGN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_sign_receipt.3 b/secure/lib/libcrypto/man/man3/CMS_sign_receipt.3
index 01ae089746642..0dda157a7663c 100644
--- a/secure/lib/libcrypto/man/man3/CMS_sign_receipt.3
+++ b/secure/lib/libcrypto/man/man3/CMS_sign_receipt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_SIGN_RECEIPT 3"
-.TH CMS_SIGN_RECEIPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_SIGN_RECEIPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_uncompress.3 b/secure/lib/libcrypto/man/man3/CMS_uncompress.3
index eac1a921aee48..eb3dbb7da17fc 100644
--- a/secure/lib/libcrypto/man/man3/CMS_uncompress.3
+++ b/secure/lib/libcrypto/man/man3/CMS_uncompress.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_UNCOMPRESS 3"
-.TH CMS_UNCOMPRESS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_UNCOMPRESS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_verify.3 b/secure/lib/libcrypto/man/man3/CMS_verify.3
index 1031b46acf8ab..609167885280a 100644
--- a/secure/lib/libcrypto/man/man3/CMS_verify.3
+++ b/secure/lib/libcrypto/man/man3/CMS_verify.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_VERIFY 3"
-.TH CMS_VERIFY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_VERIFY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CMS_verify_receipt.3 b/secure/lib/libcrypto/man/man3/CMS_verify_receipt.3
index 0e90ff6de9024..dfd661023aef2 100644
--- a/secure/lib/libcrypto/man/man3/CMS_verify_receipt.3
+++ b/secure/lib/libcrypto/man/man3/CMS_verify_receipt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CMS_VERIFY_RECEIPT 3"
-.TH CMS_VERIFY_RECEIPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CMS_VERIFY_RECEIPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CONF_modules_free.3 b/secure/lib/libcrypto/man/man3/CONF_modules_free.3
index 96020613e9e67..66182d4df2a1f 100644
--- a/secure/lib/libcrypto/man/man3/CONF_modules_free.3
+++ b/secure/lib/libcrypto/man/man3/CONF_modules_free.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CONF_MODULES_FREE 3"
-.TH CONF_MODULES_FREE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CONF_MODULES_FREE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CONF_modules_load_file.3 b/secure/lib/libcrypto/man/man3/CONF_modules_load_file.3
index f5b5d22a7381b..ba4814281493e 100644
--- a/secure/lib/libcrypto/man/man3/CONF_modules_load_file.3
+++ b/secure/lib/libcrypto/man/man3/CONF_modules_load_file.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CONF_MODULES_LOAD_FILE 3"
-.TH CONF_MODULES_LOAD_FILE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CONF_MODULES_LOAD_FILE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CRYPTO_THREAD_run_once.3 b/secure/lib/libcrypto/man/man3/CRYPTO_THREAD_run_once.3
index 6e638d566043c..df565ef63ead0 100644
--- a/secure/lib/libcrypto/man/man3/CRYPTO_THREAD_run_once.3
+++ b/secure/lib/libcrypto/man/man3/CRYPTO_THREAD_run_once.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CRYPTO_THREAD_RUN_ONCE 3"
-.TH CRYPTO_THREAD_RUN_ONCE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CRYPTO_THREAD_RUN_ONCE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CRYPTO_get_ex_new_index.3 b/secure/lib/libcrypto/man/man3/CRYPTO_get_ex_new_index.3
index 503fa928855fb..1c9fc56ddc981 100644
--- a/secure/lib/libcrypto/man/man3/CRYPTO_get_ex_new_index.3
+++ b/secure/lib/libcrypto/man/man3/CRYPTO_get_ex_new_index.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CRYPTO_GET_EX_NEW_INDEX 3"
-.TH CRYPTO_GET_EX_NEW_INDEX 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CRYPTO_GET_EX_NEW_INDEX 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CRYPTO_memcmp.3 b/secure/lib/libcrypto/man/man3/CRYPTO_memcmp.3
index 2474f4b6ab570..69da81f215950 100644
--- a/secure/lib/libcrypto/man/man3/CRYPTO_memcmp.3
+++ b/secure/lib/libcrypto/man/man3/CRYPTO_memcmp.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CRYPTO_MEMCMP 3"
-.TH CRYPTO_MEMCMP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CRYPTO_MEMCMP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CTLOG_STORE_get0_log_by_id.3 b/secure/lib/libcrypto/man/man3/CTLOG_STORE_get0_log_by_id.3
index a1ffd4a69cb37..650fd40f1dda2 100644
--- a/secure/lib/libcrypto/man/man3/CTLOG_STORE_get0_log_by_id.3
+++ b/secure/lib/libcrypto/man/man3/CTLOG_STORE_get0_log_by_id.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CTLOG_STORE_GET0_LOG_BY_ID 3"
-.TH CTLOG_STORE_GET0_LOG_BY_ID 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CTLOG_STORE_GET0_LOG_BY_ID 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CTLOG_STORE_new.3 b/secure/lib/libcrypto/man/man3/CTLOG_STORE_new.3
index 99743eb0c91c4..f2d05a14654d2 100644
--- a/secure/lib/libcrypto/man/man3/CTLOG_STORE_new.3
+++ b/secure/lib/libcrypto/man/man3/CTLOG_STORE_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CTLOG_STORE_NEW 3"
-.TH CTLOG_STORE_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CTLOG_STORE_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CTLOG_new.3 b/secure/lib/libcrypto/man/man3/CTLOG_new.3
index 4eb73ff5abcda..3e05b9d292448 100644
--- a/secure/lib/libcrypto/man/man3/CTLOG_new.3
+++ b/secure/lib/libcrypto/man/man3/CTLOG_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CTLOG_NEW 3"
-.TH CTLOG_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CTLOG_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/CT_POLICY_EVAL_CTX_new.3 b/secure/lib/libcrypto/man/man3/CT_POLICY_EVAL_CTX_new.3
index 70e7d6fb4bf09..aea7f17c7807d 100644
--- a/secure/lib/libcrypto/man/man3/CT_POLICY_EVAL_CTX_new.3
+++ b/secure/lib/libcrypto/man/man3/CT_POLICY_EVAL_CTX_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CT_POLICY_EVAL_CTX_NEW 3"
-.TH CT_POLICY_EVAL_CTX_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CT_POLICY_EVAL_CTX_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DEFINE_STACK_OF.3 b/secure/lib/libcrypto/man/man3/DEFINE_STACK_OF.3
index b6675168dff7b..fa31f42c6733a 100644
--- a/secure/lib/libcrypto/man/man3/DEFINE_STACK_OF.3
+++ b/secure/lib/libcrypto/man/man3/DEFINE_STACK_OF.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DEFINE_STACK_OF 3"
-.TH DEFINE_STACK_OF 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DEFINE_STACK_OF 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DES_random_key.3 b/secure/lib/libcrypto/man/man3/DES_random_key.3
index 0dd0ece6dd4db..7c0c517ef6b1a 100644
--- a/secure/lib/libcrypto/man/man3/DES_random_key.3
+++ b/secure/lib/libcrypto/man/man3/DES_random_key.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DES_RANDOM_KEY 3"
-.TH DES_RANDOM_KEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DES_RANDOM_KEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DH_generate_key.3 b/secure/lib/libcrypto/man/man3/DH_generate_key.3
index 3cd7a90e46695..ca8a09bffb4f6 100644
--- a/secure/lib/libcrypto/man/man3/DH_generate_key.3
+++ b/secure/lib/libcrypto/man/man3/DH_generate_key.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DH_GENERATE_KEY 3"
-.TH DH_GENERATE_KEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DH_GENERATE_KEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DH_generate_parameters.3 b/secure/lib/libcrypto/man/man3/DH_generate_parameters.3
index eb64edd6639fd..449a5c01cac3d 100644
--- a/secure/lib/libcrypto/man/man3/DH_generate_parameters.3
+++ b/secure/lib/libcrypto/man/man3/DH_generate_parameters.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DH_GENERATE_PARAMETERS 3"
-.TH DH_GENERATE_PARAMETERS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DH_GENERATE_PARAMETERS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DH_get0_pqg.3 b/secure/lib/libcrypto/man/man3/DH_get0_pqg.3
index f16b880f6e6ba..17cb2c917afb3 100644
--- a/secure/lib/libcrypto/man/man3/DH_get0_pqg.3
+++ b/secure/lib/libcrypto/man/man3/DH_get0_pqg.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DH_GET0_PQG 3"
-.TH DH_GET0_PQG 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DH_GET0_PQG 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DH_get_1024_160.3 b/secure/lib/libcrypto/man/man3/DH_get_1024_160.3
index f0a3f0b7718b7..a940411baed3e 100644
--- a/secure/lib/libcrypto/man/man3/DH_get_1024_160.3
+++ b/secure/lib/libcrypto/man/man3/DH_get_1024_160.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DH_GET_1024_160 3"
-.TH DH_GET_1024_160 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DH_GET_1024_160 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DH_meth_new.3 b/secure/lib/libcrypto/man/man3/DH_meth_new.3
index aed529ba21d72..125c9d6743f35 100644
--- a/secure/lib/libcrypto/man/man3/DH_meth_new.3
+++ b/secure/lib/libcrypto/man/man3/DH_meth_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DH_METH_NEW 3"
-.TH DH_METH_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DH_METH_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DH_new.3 b/secure/lib/libcrypto/man/man3/DH_new.3
index 2fd0721b86d06..39581ecd81506 100644
--- a/secure/lib/libcrypto/man/man3/DH_new.3
+++ b/secure/lib/libcrypto/man/man3/DH_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DH_NEW 3"
-.TH DH_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DH_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DH_new_by_nid.3 b/secure/lib/libcrypto/man/man3/DH_new_by_nid.3
index 4e85d2c0053c3..470ed51086310 100644
--- a/secure/lib/libcrypto/man/man3/DH_new_by_nid.3
+++ b/secure/lib/libcrypto/man/man3/DH_new_by_nid.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DH_NEW_BY_NID 3"
-.TH DH_NEW_BY_NID 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DH_NEW_BY_NID 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DH_set_method.3 b/secure/lib/libcrypto/man/man3/DH_set_method.3
index 3702353f11e94..6a4d7cbb549a1 100644
--- a/secure/lib/libcrypto/man/man3/DH_set_method.3
+++ b/secure/lib/libcrypto/man/man3/DH_set_method.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DH_SET_METHOD 3"
-.TH DH_SET_METHOD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DH_SET_METHOD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DH_size.3 b/secure/lib/libcrypto/man/man3/DH_size.3
index f47dbeb9e1196..560f8efa9c735 100644
--- a/secure/lib/libcrypto/man/man3/DH_size.3
+++ b/secure/lib/libcrypto/man/man3/DH_size.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DH_SIZE 3"
-.TH DH_SIZE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DH_SIZE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_SIG_new.3 b/secure/lib/libcrypto/man/man3/DSA_SIG_new.3
index 1c40267fd1871..a6eafff4db9de 100644
--- a/secure/lib/libcrypto/man/man3/DSA_SIG_new.3
+++ b/secure/lib/libcrypto/man/man3/DSA_SIG_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_SIG_NEW 3"
-.TH DSA_SIG_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_SIG_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_do_sign.3 b/secure/lib/libcrypto/man/man3/DSA_do_sign.3
index c0221b8ea9bed..9bd3d6c50d2f7 100644
--- a/secure/lib/libcrypto/man/man3/DSA_do_sign.3
+++ b/secure/lib/libcrypto/man/man3/DSA_do_sign.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_DO_SIGN 3"
-.TH DSA_DO_SIGN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_DO_SIGN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_dup_DH.3 b/secure/lib/libcrypto/man/man3/DSA_dup_DH.3
index 9b3852a3dc9e1..eb09c509e42da 100644
--- a/secure/lib/libcrypto/man/man3/DSA_dup_DH.3
+++ b/secure/lib/libcrypto/man/man3/DSA_dup_DH.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_DUP_DH 3"
-.TH DSA_DUP_DH 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_DUP_DH 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_generate_key.3 b/secure/lib/libcrypto/man/man3/DSA_generate_key.3
index 10ed1b44efbd0..9e6b6a9d439c5 100644
--- a/secure/lib/libcrypto/man/man3/DSA_generate_key.3
+++ b/secure/lib/libcrypto/man/man3/DSA_generate_key.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_GENERATE_KEY 3"
-.TH DSA_GENERATE_KEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_GENERATE_KEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_generate_parameters.3 b/secure/lib/libcrypto/man/man3/DSA_generate_parameters.3
index 15d56a2727a1f..50993915d1c52 100644
--- a/secure/lib/libcrypto/man/man3/DSA_generate_parameters.3
+++ b/secure/lib/libcrypto/man/man3/DSA_generate_parameters.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_GENERATE_PARAMETERS 3"
-.TH DSA_GENERATE_PARAMETERS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_GENERATE_PARAMETERS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_get0_pqg.3 b/secure/lib/libcrypto/man/man3/DSA_get0_pqg.3
index fed19b74d7313..009098316d345 100644
--- a/secure/lib/libcrypto/man/man3/DSA_get0_pqg.3
+++ b/secure/lib/libcrypto/man/man3/DSA_get0_pqg.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_GET0_PQG 3"
-.TH DSA_GET0_PQG 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_GET0_PQG 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_meth_new.3 b/secure/lib/libcrypto/man/man3/DSA_meth_new.3
index 0fb5e6c234220..d78ff98d08954 100644
--- a/secure/lib/libcrypto/man/man3/DSA_meth_new.3
+++ b/secure/lib/libcrypto/man/man3/DSA_meth_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_METH_NEW 3"
-.TH DSA_METH_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_METH_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_new.3 b/secure/lib/libcrypto/man/man3/DSA_new.3
index 031b13f4bfb92..7ad7d390e9d7c 100644
--- a/secure/lib/libcrypto/man/man3/DSA_new.3
+++ b/secure/lib/libcrypto/man/man3/DSA_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_NEW 3"
-.TH DSA_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_set_method.3 b/secure/lib/libcrypto/man/man3/DSA_set_method.3
index df3e03928363f..42ac41f4dba54 100644
--- a/secure/lib/libcrypto/man/man3/DSA_set_method.3
+++ b/secure/lib/libcrypto/man/man3/DSA_set_method.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_SET_METHOD 3"
-.TH DSA_SET_METHOD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_SET_METHOD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_sign.3 b/secure/lib/libcrypto/man/man3/DSA_sign.3
index 573016019693c..2306ab2c985e0 100644
--- a/secure/lib/libcrypto/man/man3/DSA_sign.3
+++ b/secure/lib/libcrypto/man/man3/DSA_sign.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_SIGN 3"
-.TH DSA_SIGN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_SIGN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DSA_size.3 b/secure/lib/libcrypto/man/man3/DSA_size.3
index 486383b20f50f..2f2a98b6942af 100644
--- a/secure/lib/libcrypto/man/man3/DSA_size.3
+++ b/secure/lib/libcrypto/man/man3/DSA_size.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DSA_SIZE 3"
-.TH DSA_SIZE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DSA_SIZE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DTLS_get_data_mtu.3 b/secure/lib/libcrypto/man/man3/DTLS_get_data_mtu.3
index c66b8b2bc0590..083ed3cccc00c 100644
--- a/secure/lib/libcrypto/man/man3/DTLS_get_data_mtu.3
+++ b/secure/lib/libcrypto/man/man3/DTLS_get_data_mtu.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DTLS_GET_DATA_MTU 3"
-.TH DTLS_GET_DATA_MTU 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DTLS_GET_DATA_MTU 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DTLS_set_timer_cb.3 b/secure/lib/libcrypto/man/man3/DTLS_set_timer_cb.3
index 6fd51298d1357..65f0a9bb18e18 100644
--- a/secure/lib/libcrypto/man/man3/DTLS_set_timer_cb.3
+++ b/secure/lib/libcrypto/man/man3/DTLS_set_timer_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DTLS_SET_TIMER_CB 3"
-.TH DTLS_SET_TIMER_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DTLS_SET_TIMER_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/DTLSv1_listen.3 b/secure/lib/libcrypto/man/man3/DTLSv1_listen.3
index 44b330e3523fa..8472bfd5c6289 100644
--- a/secure/lib/libcrypto/man/man3/DTLSv1_listen.3
+++ b/secure/lib/libcrypto/man/man3/DTLSv1_listen.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DTLSV1_LISTEN 3"
-.TH DTLSV1_LISTEN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DTLSV1_LISTEN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -200,9 +200,9 @@ the peer and continue the handshake in a connected state.
.PP
Prior to calling \fBDTLSv1_listen()\fR user code must ensure that cookie generation
and verification callbacks have been set up using
-\&\fBSSL_CTX_set_cookie_generate_cb()\fR and \fBSSL_CTX_set_cookie_verify_cb()\fR
-respectively. For \fBSSL_stateless()\fR, \fBSSL_CTX_set_stateless_cookie_generate_cb()\fR
-and \fBSSL_CTX_set_stateless_cookie_verify_cb()\fR must be used instead.
+\&\fBSSL_CTX_set_cookie_generate_cb\fR\|(3) and \fBSSL_CTX_set_cookie_verify_cb\fR\|(3)
+respectively. For \fBSSL_stateless()\fR, \fBSSL_CTX_set_stateless_cookie_generate_cb\fR\|(3)
+and \fBSSL_CTX_set_stateless_cookie_verify_cb\fR\|(3) must be used instead.
.PP
Since \fBDTLSv1_listen()\fR operates entirely statelessly whilst processing incoming
ClientHellos it is unable to process fragmented messages (since this would
@@ -244,8 +244,10 @@ produce return codes <= 0 (in typical implementations user code treats all
errors as non-fatal), whilst return codes >0 indicate success.
.SH "SEE ALSO"
.IX Header "SEE ALSO"
-\&\fBSSL_get_error\fR\|(3), \fBSSL_accept\fR\|(3),
-\&\fBssl\fR\|(7), \fBbio\fR\|(7)
+\&\fBSSL_CTX_set_cookie_generate_cb\fR\|(3), \fBSSL_CTX_set_cookie_verify_cb\fR\|(3),
+\&\fBSSL_CTX_set_stateless_cookie_generate_cb\fR\|(3),
+\&\fBSSL_CTX_set_stateless_cookie_verify_cb\fR\|(3), \fBSSL_get_error\fR\|(3),
+\&\fBSSL_accept\fR\|(3), \fBssl\fR\|(7), \fBbio\fR\|(7)
.SH "HISTORY"
.IX Header "HISTORY"
The \fBSSL_stateless()\fR function was added in OpenSSL 1.1.1.
diff --git a/secure/lib/libcrypto/man/man3/ECDSA_SIG_new.3 b/secure/lib/libcrypto/man/man3/ECDSA_SIG_new.3
index b8d56d38ca5a5..ad04af24f06fd 100644
--- a/secure/lib/libcrypto/man/man3/ECDSA_SIG_new.3
+++ b/secure/lib/libcrypto/man/man3/ECDSA_SIG_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ECDSA_SIG_NEW 3"
-.TH ECDSA_SIG_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ECDSA_SIG_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ECPKParameters_print.3 b/secure/lib/libcrypto/man/man3/ECPKParameters_print.3
index 6ad90ecb3c475..497574b27941f 100644
--- a/secure/lib/libcrypto/man/man3/ECPKParameters_print.3
+++ b/secure/lib/libcrypto/man/man3/ECPKParameters_print.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ECPKPARAMETERS_PRINT 3"
-.TH ECPKPARAMETERS_PRINT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ECPKPARAMETERS_PRINT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EC_GFp_simple_method.3 b/secure/lib/libcrypto/man/man3/EC_GFp_simple_method.3
index be5f0668c35c5..df81e2ca03d16 100644
--- a/secure/lib/libcrypto/man/man3/EC_GFp_simple_method.3
+++ b/secure/lib/libcrypto/man/man3/EC_GFp_simple_method.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EC_GFP_SIMPLE_METHOD 3"
-.TH EC_GFP_SIMPLE_METHOD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EC_GFP_SIMPLE_METHOD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EC_GROUP_copy.3 b/secure/lib/libcrypto/man/man3/EC_GROUP_copy.3
index cc369cbcdd2d8..816db65b79c02 100644
--- a/secure/lib/libcrypto/man/man3/EC_GROUP_copy.3
+++ b/secure/lib/libcrypto/man/man3/EC_GROUP_copy.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EC_GROUP_COPY 3"
-.TH EC_GROUP_COPY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EC_GROUP_COPY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -167,7 +167,7 @@ EC_GROUP_get0_order, EC_GROUP_order_bits, EC_GROUP_get0_cofactor, EC_GROUP_copy,
\& int EC_GROUP_get_asn1_flag(const EC_GROUP *group);
\&
\& void EC_GROUP_set_point_conversion_form(EC_GROUP *group, point_conversion_form_t form);
-\& point_conversion_form_t EC_GROUP_get_point_conversion_form(const EC_GROUP *);
+\& point_conversion_form_t EC_GROUP_get_point_conversion_form(const EC_GROUP *group);
\&
\& unsigned char *EC_GROUP_get0_seed(const EC_GROUP *x);
\& size_t EC_GROUP_get_seed_len(const EC_GROUP *);
@@ -188,34 +188,39 @@ EC_GROUP_get0_order, EC_GROUP_order_bits, EC_GROUP_get0_cofactor, EC_GROUP_copy,
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
-EC_GROUP_copy copies the curve \fBsrc\fR into \fBdst\fR. Both \fBsrc\fR and \fBdst\fR must use the same \s-1EC_METHOD.\s0
+\&\fBEC_GROUP_copy()\fR copies the curve \fBsrc\fR into \fBdst\fR. Both \fBsrc\fR and \fBdst\fR must use the same \s-1EC_METHOD.\s0
.PP
-EC_GROUP_dup creates a new \s-1EC_GROUP\s0 object and copies the content from \fBsrc\fR to the newly created
+\&\fBEC_GROUP_dup()\fR creates a new \s-1EC_GROUP\s0 object and copies the content from \fBsrc\fR to the newly created
\&\s-1EC_GROUP\s0 object.
.PP
-EC_GROUP_method_of obtains the \s-1EC_METHOD\s0 of \fBgroup\fR.
+\&\fBEC_GROUP_method_of()\fR obtains the \s-1EC_METHOD\s0 of \fBgroup\fR.
.PP
-EC_GROUP_set_generator sets curve parameters that must be agreed by all participants using the curve. These
+\&\fBEC_GROUP_set_generator()\fR sets curve parameters that must be agreed by all participants using the curve. These
parameters include the \fBgenerator\fR, the \fBorder\fR and the \fBcofactor\fR. The \fBgenerator\fR is a well defined point on the
curve chosen for cryptographic operations. Integers used for point multiplications will be between 0 and
n\-1 where n is the \fBorder\fR. The \fBorder\fR multiplied by the \fBcofactor\fR gives the number of points on the curve.
.PP
-EC_GROUP_get0_generator returns the generator for the identified \fBgroup\fR.
+\&\fBEC_GROUP_get0_generator()\fR returns the generator for the identified \fBgroup\fR.
.PP
-The functions EC_GROUP_get_order and EC_GROUP_get_cofactor populate the provided \fBorder\fR and \fBcofactor\fR parameters
-with the respective order and cofactors for the \fBgroup\fR.
+\&\fBEC_GROUP_get_order()\fR retrieves the order of \fBgroup\fR and copies its value into
+\&\fBorder\fR. It fails in case \fBgroup\fR is not fully initialized (i.e., its order
+is not set or set to zero).
.PP
-The functions EC_GROUP_set_curve_name and EC_GROUP_get_curve_name, set and get the \s-1NID\s0 for the curve respectively
+\&\fBEC_GROUP_get_cofactor()\fR retrieves the cofactor of \fBgroup\fR and copies its value
+into \fBcofactor\fR. It fails in case \fBgroup\fR is not fully initialized or if the
+cofactor is not set (or set to zero).
+.PP
+The functions \fBEC_GROUP_set_curve_name()\fR and \fBEC_GROUP_get_curve_name()\fR, set and get the \s-1NID\s0 for the curve respectively
(see \fBEC_GROUP_new\fR\|(3)). If a curve does not have a \s-1NID\s0 associated with it, then EC_GROUP_get_curve_name
-will return 0.
+will return NID_undef.
.PP
The asn1_flag value is used to determine whether the curve encoding uses
explicit parameters or a named curve using an \s-1ASN1 OID:\s0 many applications only
support the latter form. If asn1_flag is \fB\s-1OPENSSL_EC_NAMED_CURVE\s0\fR then the
named curve form is used and the parameters must have a corresponding
named curve \s-1NID\s0 set. If asn1_flags is \fB\s-1OPENSSL_EC_EXPLICIT_CURVE\s0\fR the
-parameters are explicitly encoded. The functions EC_GROUP_get_asn1_flag and
-EC_GROUP_set_asn1_flag get and set the status of the asn1_flag for the curve.
+parameters are explicitly encoded. The functions \fBEC_GROUP_get_asn1_flag()\fR and
+\&\fBEC_GROUP_set_asn1_flag()\fR get and set the status of the asn1_flag for the curve.
Note: \fB\s-1OPENSSL_EC_EXPLICIT_CURVE\s0\fR was added in OpenSSL 1.1.0, for
previous versions of OpenSSL the value 0 must be used instead. Before OpenSSL
1.1.0 the default form was to use explicit parameters (meaning that
@@ -248,32 +253,32 @@ the two possible solutions for y has been used, followed by the octets for x.
For \s-1POINT_CONVERSION_HYBRID\s0 the point is encoded as an octet signifying the \s-1HYBRID\s0 form has been used \s-1AND\s0 which of the two
possible solutions for y has been used, followed by the octets for x, followed by the octets for y.
.PP
-The functions EC_GROUP_set_point_conversion_form and EC_GROUP_get_point_conversion_form set and get the point_conversion_form
+The functions \fBEC_GROUP_set_point_conversion_form()\fR and \fBEC_GROUP_get_point_conversion_form()\fR, set and get the point_conversion_form
for the curve respectively.
.PP
\&\s-1ANSI X9.62\s0 (\s-1ECDSA\s0 standard) defines a method of generating the curve parameter b from a random number. This provides advantages
in that a parameter obtained in this way is highly unlikely to be susceptible to special purpose attacks, or have any trapdoors in it.
If the seed is present for a curve then the b parameter was generated in a verifiable fashion using that seed. The OpenSSL \s-1EC\s0 library
-does not use this seed value but does enable you to inspect it using EC_GROUP_get0_seed. This returns a pointer to a memory block
-containing the seed that was used. The length of the memory block can be obtained using EC_GROUP_get_seed_len. A number of the
-builtin curves within the library provide seed values that can be obtained. It is also possible to set a custom seed using
-EC_GROUP_set_seed and passing a pointer to a memory block, along with the length of the seed. Again, the \s-1EC\s0 library will not use
+does not use this seed value but does enable you to inspect it using \fBEC_GROUP_get0_seed()\fR. This returns a pointer to a memory block
+containing the seed that was used. The length of the memory block can be obtained using \fBEC_GROUP_get_seed_len()\fR. A number of the
+built-in curves within the library provide seed values that can be obtained. It is also possible to set a custom seed using
+\&\fBEC_GROUP_set_seed()\fR and passing a pointer to a memory block, along with the length of the seed. Again, the \s-1EC\s0 library will not use
this seed value, although it will be preserved in any \s-1ASN1\s0 based communications.
.PP
-EC_GROUP_get_degree gets the degree of the field. For Fp fields this will be the number of bits in p. For F2^m fields this will be
+\&\fBEC_GROUP_get_degree()\fR gets the degree of the field. For Fp fields this will be the number of bits in p. For F2^m fields this will be
the value m.
.PP
-The function EC_GROUP_check_discriminant calculates the discriminant for the curve and verifies that it is valid.
+The function \fBEC_GROUP_check_discriminant()\fR calculates the discriminant for the curve and verifies that it is valid.
For a curve defined over Fp the discriminant is given by the formula 4*a^3 + 27*b^2 whilst for F2^m curves the discriminant is
simply b. In either case for the curve to be valid the discriminant must be non zero.
.PP
-The function EC_GROUP_check performs a number of checks on a curve to verify that it is valid. Checks performed include
+The function \fBEC_GROUP_check()\fR performs a number of checks on a curve to verify that it is valid. Checks performed include
verifying that the discriminant is non zero; that a generator has been defined; that the generator is on the curve and has
the correct order.
.PP
-EC_GROUP_cmp compares \fBa\fR and \fBb\fR to determine whether they represent the same curve or not.
+\&\fBEC_GROUP_cmp()\fR compares \fBa\fR and \fBb\fR to determine whether they represent the same curve or not.
.PP
-The functions EC_GROUP_get_basis_type, EC_GROUP_get_trinomial_basis and EC_GROUP_get_pentanomial_basis should only be called for curves
+The functions \fBEC_GROUP_get_basis_type()\fR, \fBEC_GROUP_get_trinomial_basis()\fR and \fBEC_GROUP_get_pentanomial_basis()\fR should only be called for curves
defined over an F2^m field. Addition and multiplication operations within an F2^m field are performed using an irreducible polynomial
function f(x). This function is either a trinomial of the form:
.PP
@@ -283,38 +288,47 @@ or a pentanomial of the form:
.PP
f(x) = x^m + x^k3 + x^k2 + x^k1 + 1 with m > k3 > k2 > k1 >= 1
.PP
-The function EC_GROUP_get_basis_type returns a \s-1NID\s0 identifying whether a trinomial or pentanomial is in use for the field. The
-function EC_GROUP_get_trinomial_basis must only be called where f(x) is of the trinomial form, and returns the value of \fBk\fR. Similarly
-the function EC_GROUP_get_pentanomial_basis must only be called where f(x) is of the pentanomial form, and returns the values of \fBk1\fR,
+The function \fBEC_GROUP_get_basis_type()\fR returns a \s-1NID\s0 identifying whether a trinomial or pentanomial is in use for the field. The
+function \fBEC_GROUP_get_trinomial_basis()\fR must only be called where f(x) is of the trinomial form, and returns the value of \fBk\fR. Similarly
+the function \fBEC_GROUP_get_pentanomial_basis()\fR must only be called where f(x) is of the pentanomial form, and returns the values of \fBk1\fR,
\&\fBk2\fR and \fBk3\fR respectively.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
-The following functions return 1 on success or 0 on error: EC_GROUP_copy, EC_GROUP_set_generator, EC_GROUP_check,
-EC_GROUP_check_discriminant, EC_GROUP_get_trinomial_basis and EC_GROUP_get_pentanomial_basis.
+The following functions return 1 on success or 0 on error: \fBEC_GROUP_copy()\fR, \fBEC_GROUP_set_generator()\fR, \fBEC_GROUP_check()\fR,
+\&\fBEC_GROUP_check_discriminant()\fR, \fBEC_GROUP_get_trinomial_basis()\fR and \fBEC_GROUP_get_pentanomial_basis()\fR.
+.PP
+\&\fBEC_GROUP_dup()\fR returns a pointer to the duplicated curve, or \s-1NULL\s0 on error.
+.PP
+\&\fBEC_GROUP_method_of()\fR returns the \s-1EC_METHOD\s0 implementation in use for the given curve or \s-1NULL\s0 on error.
+.PP
+\&\fBEC_GROUP_get0_generator()\fR returns the generator for the given curve or \s-1NULL\s0 on error.
+.PP
+\&\fBEC_GROUP_get_order()\fR returns 0 if the order is not set (or set to zero) for
+\&\fBgroup\fR or if copying into \fBorder\fR fails, 1 otherwise.
+.PP
+\&\fBEC_GROUP_get_cofactor()\fR returns 0 if the cofactor is not set (or is set to zero) for \fBgroup\fR or if copying into \fBcofactor\fR fails, 1 otherwise.
.PP
-EC_GROUP_dup returns a pointer to the duplicated curve, or \s-1NULL\s0 on error.
+\&\fBEC_GROUP_get_curve_name()\fR returns the curve name (\s-1NID\s0) for \fBgroup\fR or will return NID_undef if no curve name is associated.
.PP
-EC_GROUP_method_of returns the \s-1EC_METHOD\s0 implementation in use for the given curve or \s-1NULL\s0 on error.
+\&\fBEC_GROUP_get_asn1_flag()\fR returns the \s-1ASN1\s0 flag for the specified \fBgroup\fR .
.PP
-EC_GROUP_get0_generator returns the generator for the given curve or \s-1NULL\s0 on error.
+\&\fBEC_GROUP_get_point_conversion_form()\fR returns the point_conversion_form for \fBgroup\fR.
.PP
-EC_GROUP_get_order, EC_GROUP_get_cofactor, EC_GROUP_get_curve_name, EC_GROUP_get_asn1_flag, EC_GROUP_get_point_conversion_form
-and EC_GROUP_get_degree return the order, cofactor, curve name (\s-1NID\s0), \s-1ASN1\s0 flag, point_conversion_form and degree for the
-specified curve respectively. If there is no curve name associated with a curve then EC_GROUP_get_curve_name will return 0.
+\&\fBEC_GROUP_get_degree()\fR returns the degree for \fBgroup\fR or 0 if the operation is not supported by the underlying group implementation.
.PP
\&\fBEC_GROUP_get0_order()\fR returns an internal pointer to the group order.
\&\fBEC_GROUP_order_bits()\fR returns the number of bits in the group order.
\&\fBEC_GROUP_get0_cofactor()\fR returns an internal pointer to the group cofactor.
.PP
-EC_GROUP_get0_seed returns a pointer to the seed that was used to generate the parameter b, or \s-1NULL\s0 if the seed is not
-specified. EC_GROUP_get_seed_len returns the length of the seed or 0 if the seed is not specified.
+\&\fBEC_GROUP_get0_seed()\fR returns a pointer to the seed that was used to generate the parameter b, or \s-1NULL\s0 if the seed is not
+specified. \fBEC_GROUP_get_seed_len()\fR returns the length of the seed or 0 if the seed is not specified.
.PP
-EC_GROUP_set_seed returns the length of the seed that has been set. If the supplied seed is \s-1NULL,\s0 or the supplied seed length is
+\&\fBEC_GROUP_set_seed()\fR returns the length of the seed that has been set. If the supplied seed is \s-1NULL,\s0 or the supplied seed length is
0, the return value will be 1. On error 0 is returned.
.PP
-EC_GROUP_cmp returns 0 if the curves are equal, 1 if they are not equal, or \-1 on error.
+\&\fBEC_GROUP_cmp()\fR returns 0 if the curves are equal, 1 if they are not equal, or \-1 on error.
.PP
-EC_GROUP_get_basis_type returns the values NID_X9_62_tpBasis or NID_X9_62_ppBasis (as defined in <openssl/obj_mac.h>) for a
+\&\fBEC_GROUP_get_basis_type()\fR returns the values NID_X9_62_tpBasis or NID_X9_62_ppBasis (as defined in <openssl/obj_mac.h>) for a
trinomial or pentanomial respectively. Alternatively in the event of an error a 0 is returned.
.SH "SEE ALSO"
.IX Header "SEE ALSO"
diff --git a/secure/lib/libcrypto/man/man3/EC_GROUP_new.3 b/secure/lib/libcrypto/man/man3/EC_GROUP_new.3
index 2fc979596884e..9c812dde7f0a2 100644
--- a/secure/lib/libcrypto/man/man3/EC_GROUP_new.3
+++ b/secure/lib/libcrypto/man/man3/EC_GROUP_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EC_GROUP_NEW 3"
-.TH EC_GROUP_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EC_GROUP_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -177,49 +177,63 @@ EC_GROUP_get_ecparameters, EC_GROUP_get_ecpkparameters, EC_GROUP_new, EC_GROUP_n
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
-Within the library there are two forms of elliptic curve that are of interest. The first form is those defined over the
-prime field Fp. The elements of Fp are the integers 0 to p\-1, where p is a prime number. This gives us a revised
+Within the library there are two forms of elliptic curve that are of interest.
+The first form is those defined over the prime field Fp. The elements of Fp are
+the integers 0 to p\-1, where p is a prime number. This gives us a revised
elliptic curve equation as follows:
.PP
y^2 mod p = x^3 +ax + b mod p
.PP
-The second form is those defined over a binary field F2^m where the elements of the field are integers of length at
-most m bits. For this form the elliptic curve equation is modified to:
+The second form is those defined over a binary field F2^m where the elements of
+the field are integers of length at most m bits. For this form the elliptic
+curve equation is modified to:
.PP
y^2 + xy = x^3 + ax^2 + b (where b != 0)
.PP
-Operations in a binary field are performed relative to an \fBirreducible polynomial\fR. All such curves with OpenSSL
-use a trinomial or a pentanomial for this parameter.
+Operations in a binary field are performed relative to an \fBirreducible
+polynomial\fR. All such curves with OpenSSL use a trinomial or a pentanomial for
+this parameter.
.PP
-A new curve can be constructed by calling EC_GROUP_new, using the implementation provided by \fBmeth\fR (see
-\&\fBEC_GFp_simple_method\fR\|(3)). It is then necessary to call \fBEC_GROUP_set_curve()\fR to set the curve parameters.
-\&\fBEC_GROUP_new_from_ecparameters()\fR will create a group from the
-specified \fBparams\fR and
-\&\fBEC_GROUP_new_from_ecpkparameters()\fR will create a group from the specific \s-1PK\s0 \fBparams\fR.
+A new curve can be constructed by calling \fBEC_GROUP_new()\fR, using the
+implementation provided by \fBmeth\fR (see \fBEC_GFp_simple_method\fR\|(3)). It is then
+necessary to call \fBEC_GROUP_set_curve()\fR to set the curve parameters.
+\&\fBEC_GROUP_new_from_ecparameters()\fR will create a group from the specified
+\&\fBparams\fR and \fBEC_GROUP_new_from_ecpkparameters()\fR will create a group from the
+specific \s-1PK\s0 \fBparams\fR.
.PP
-\&\fBEC_GROUP_set_curve()\fR sets the curve parameters \fBp\fR, \fBa\fR and \fBb\fR. For a curve over Fp \fBb\fR
-is the prime for the field. For a curve over F2^m \fBp\fR represents the irreducible polynomial \- each bit
-represents a term in the polynomial. Therefore there will either be three or five bits set dependent on whether
-the polynomial is a trinomial or a pentanomial.
+\&\fBEC_GROUP_set_curve()\fR sets the curve parameters \fBp\fR, \fBa\fR and \fBb\fR. For a curve
+over Fp \fBp\fR is the prime for the field. For a curve over F2^m \fBp\fR represents
+the irreducible polynomial \- each bit represents a term in the polynomial.
+Therefore there will either be three or five bits set dependent on whether the
+polynomial is a trinomial or a pentanomial.
+In either case, \fBa\fR and \fBb\fR represents the coefficients a and b from the
+relevant equation introduced above.
.PP
\&\fBEC_group_get_curve()\fR obtains the previously set curve parameters.
.PP
-\&\fBEC_GROUP_set_curve_GFp()\fR and \fBEC_GROUP_set_curve_GF2m()\fR are synonyms for \fBEC_GROUP_set_curve()\fR. They are defined for
-backwards compatibility only and should not be used.
+\&\fBEC_GROUP_set_curve_GFp()\fR and \fBEC_GROUP_set_curve_GF2m()\fR are synonyms for
+\&\fBEC_GROUP_set_curve()\fR. They are defined for backwards compatibility only and
+should not be used.
.PP
-\&\fBEC_GROUP_get_curve_GFp()\fR and \fBEC_GROUP_get_curve_GF2m()\fR are synonyms for \fBEC_GROUP_get_curve()\fR. They are defined for
-backwards compatibility only and should not be used.
+\&\fBEC_GROUP_get_curve_GFp()\fR and \fBEC_GROUP_get_curve_GF2m()\fR are synonyms for
+\&\fBEC_GROUP_get_curve()\fR. They are defined for backwards compatibility only and
+should not be used.
.PP
-The functions EC_GROUP_new_curve_GFp and EC_GROUP_new_curve_GF2m are shortcuts for calling EC_GROUP_new and then the
-EC_GROUP_set_curve function. An appropriate default implementation method will be used.
+The functions \fBEC_GROUP_new_curve_GFp()\fR and \fBEC_GROUP_new_curve_GF2m()\fR are
+shortcuts for calling \fBEC_GROUP_new()\fR and then the \fBEC_GROUP_set_curve()\fR function.
+An appropriate default implementation method will be used.
.PP
-Whilst the library can be used to create any curve using the functions described above, there are also a number of
-predefined curves that are available. In order to obtain a list of all of the predefined curves, call the function
-EC_get_builtin_curves. The parameter \fBr\fR should be an array of EC_builtin_curve structures of size \fBnitems\fR. The function
-will populate the \fBr\fR array with information about the builtin curves. If \fBnitems\fR is less than the total number of
-curves available, then the first \fBnitems\fR curves will be returned. Otherwise the total number of curves will be
-provided. The return value is the total number of curves available (whether that number has been populated in \fBr\fR or
-not). Passing a \s-1NULL\s0 \fBr\fR, or setting \fBnitems\fR to 0 will do nothing other than return the total number of curves available.
+Whilst the library can be used to create any curve using the functions described
+above, there are also a number of predefined curves that are available. In order
+to obtain a list of all of the predefined curves, call the function
+\&\fBEC_get_builtin_curves()\fR. The parameter \fBr\fR should be an array of
+EC_builtin_curve structures of size \fBnitems\fR. The function will populate the
+\&\fBr\fR array with information about the builtin curves. If \fBnitems\fR is less than
+the total number of curves available, then the first \fBnitems\fR curves will be
+returned. Otherwise the total number of curves will be provided. The return
+value is the total number of curves available (whether that number has been
+populated in \fBr\fR or not). Passing a \s-1NULL\s0 \fBr\fR, or setting \fBnitems\fR to 0 will
+do nothing other than return the total number of curves available.
The EC_builtin_curve structure is defined as follows:
.PP
.Vb 4
@@ -229,23 +243,27 @@ The EC_builtin_curve structure is defined as follows:
\& } EC_builtin_curve;
.Ve
.PP
-Each EC_builtin_curve item has a unique integer id (\fBnid\fR), and a human readable comment string describing the curve.
+Each EC_builtin_curve item has a unique integer id (\fBnid\fR), and a human
+readable comment string describing the curve.
.PP
-In order to construct a builtin curve use the function EC_GROUP_new_by_curve_name and provide the \fBnid\fR of the curve to
+In order to construct a builtin curve use the function
+\&\fBEC_GROUP_new_by_curve_name()\fR and provide the \fBnid\fR of the curve to
be constructed.
.PP
-EC_GROUP_free frees the memory associated with the \s-1EC_GROUP.\s0
+\&\fBEC_GROUP_free()\fR frees the memory associated with the \s-1EC_GROUP.\s0
If \fBgroup\fR is \s-1NULL\s0 nothing is done.
.PP
-EC_GROUP_clear_free destroys any sensitive data held within the \s-1EC_GROUP\s0 and then frees its memory.
-If \fBgroup\fR is \s-1NULL\s0 nothing is done.
+\&\fBEC_GROUP_clear_free()\fR destroys any sensitive data held within the \s-1EC_GROUP\s0 and
+then frees its memory. If \fBgroup\fR is \s-1NULL\s0 nothing is done.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
-All EC_GROUP_new* functions return a pointer to the newly constructed group, or \s-1NULL\s0 on error.
+All EC_GROUP_new* functions return a pointer to the newly constructed group, or
+\&\s-1NULL\s0 on error.
.PP
-EC_get_builtin_curves returns the number of builtin curves that are available.
+\&\fBEC_get_builtin_curves()\fR returns the number of builtin curves that are available.
.PP
-EC_GROUP_set_curve_GFp, EC_GROUP_get_curve_GFp, EC_GROUP_set_curve_GF2m, EC_GROUP_get_curve_GF2m return 1 on success or 0 on error.
+\&\fBEC_GROUP_set_curve_GFp()\fR, \fBEC_GROUP_get_curve_GFp()\fR, \fBEC_GROUP_set_curve_GF2m()\fR,
+\&\fBEC_GROUP_get_curve_GF2m()\fR return 1 on success or 0 on error.
.SH "SEE ALSO"
.IX Header "SEE ALSO"
\&\fBcrypto\fR\|(7), \fBEC_GROUP_copy\fR\|(3),
@@ -253,7 +271,7 @@ EC_GROUP_set_curve_GFp, EC_GROUP_get_curve_GFp, EC_GROUP_set_curve_GF2m, EC_GROU
\&\fBEC_GFp_simple_method\fR\|(3), \fBd2i_ECPKParameters\fR\|(3)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2013\-2018 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2013\-2019 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/EC_KEY_get_enc_flags.3 b/secure/lib/libcrypto/man/man3/EC_KEY_get_enc_flags.3
index 7c23945de85fa..3af6992d250cb 100644
--- a/secure/lib/libcrypto/man/man3/EC_KEY_get_enc_flags.3
+++ b/secure/lib/libcrypto/man/man3/EC_KEY_get_enc_flags.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EC_KEY_GET_ENC_FLAGS 3"
-.TH EC_KEY_GET_ENC_FLAGS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EC_KEY_GET_ENC_FLAGS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EC_KEY_new.3 b/secure/lib/libcrypto/man/man3/EC_KEY_new.3
index fab02e549f2bc..5b9d9dc9dfd00 100644
--- a/secure/lib/libcrypto/man/man3/EC_KEY_new.3
+++ b/secure/lib/libcrypto/man/man3/EC_KEY_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EC_KEY_NEW 3"
-.TH EC_KEY_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EC_KEY_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EC_POINT_add.3 b/secure/lib/libcrypto/man/man3/EC_POINT_add.3
index 7f215809be920..a27cb6fd9d8ca 100644
--- a/secure/lib/libcrypto/man/man3/EC_POINT_add.3
+++ b/secure/lib/libcrypto/man/man3/EC_POINT_add.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EC_POINT_ADD 3"
-.TH EC_POINT_ADD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EC_POINT_ADD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EC_POINT_new.3 b/secure/lib/libcrypto/man/man3/EC_POINT_new.3
index fd5139878a171..2c010d883a2b0 100644
--- a/secure/lib/libcrypto/man/man3/EC_POINT_new.3
+++ b/secure/lib/libcrypto/man/man3/EC_POINT_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EC_POINT_NEW 3"
-.TH EC_POINT_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EC_POINT_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -282,6 +282,26 @@ The functions \fBEC_POINT_point2oct()\fR, \fBEC_POINT_oct2point()\fR, \fBEC_POIN
\&\fBEC_POINT_bn2point()\fR, \fBEC_POINT_point2hex()\fR and \fBEC_POINT_hex2point()\fR convert from
and to EC_POINTs for the formats: octet, \s-1BIGNUM\s0 and hexadecimal respectively.
.PP
+The function \fBEC_POINT_point2oct()\fR encodes the given curve point \fBp\fR as an
+octet string into the buffer \fBbuf\fR of size \fBlen\fR, using the specified
+conversion form \fBform\fR.
+The encoding conforms with Sec. 2.3.3 of the \s-1SECG SEC 1\s0 (\*(L"Elliptic Curve
+Cryptography\*(R") standard.
+Similarly the function \fBEC_POINT_oct2point()\fR decodes a curve point into \fBp\fR from
+the octet string contained in the given buffer \fBbuf\fR of size \fBlen\fR, conforming
+to Sec. 2.3.4 of the \s-1SECG SEC 1\s0 (\*(L"Elliptic Curve Cryptography\*(R") standard.
+.PP
+The functions \fBEC_POINT_point2hex()\fR and \fBEC_POINT_point2bn()\fR convert a point \fBp\fR,
+respectively, to the hexadecimal or \s-1BIGNUM\s0 representation of the same
+encoding of the function \fBEC_POINT_point2oct()\fR.
+Vice versa, similarly to the function \fBEC_POINT_oct2point()\fR, the functions
+\&\fBEC_POINT_hex2point()\fR and \fBEC_POINT_point2bn()\fR decode the hexadecimal or
+\&\s-1BIGNUM\s0 representation into the \s-1EC_POINT\s0 \fBp\fR.
+.PP
+Notice that, according to the standard, the octet string encoding of the point
+at infinity for a given curve is fixed to a single octet of value zero and that,
+vice versa, a single octet of size zero is decoded as the point at infinity.
+.PP
The function \fBEC_POINT_point2oct()\fR must be supplied with a buffer long enough to
store the octet form. The return value provides the number of octets stored.
Calling the function with a \s-1NULL\s0 buffer will not perform the conversion but
diff --git a/secure/lib/libcrypto/man/man3/ENGINE_add.3 b/secure/lib/libcrypto/man/man3/ENGINE_add.3
index 197b02a51967c..e856741175c7b 100644
--- a/secure/lib/libcrypto/man/man3/ENGINE_add.3
+++ b/secure/lib/libcrypto/man/man3/ENGINE_add.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ENGINE_ADD 3"
-.TH ENGINE_ADD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ENGINE_ADD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ERR_GET_LIB.3 b/secure/lib/libcrypto/man/man3/ERR_GET_LIB.3
index fc5445cc8a35f..c9ff16d11d988 100644
--- a/secure/lib/libcrypto/man/man3/ERR_GET_LIB.3
+++ b/secure/lib/libcrypto/man/man3/ERR_GET_LIB.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ERR_GET_LIB 3"
-.TH ERR_GET_LIB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ERR_GET_LIB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ERR_clear_error.3 b/secure/lib/libcrypto/man/man3/ERR_clear_error.3
index 0e99443deae41..b65193b44d0cc 100644
--- a/secure/lib/libcrypto/man/man3/ERR_clear_error.3
+++ b/secure/lib/libcrypto/man/man3/ERR_clear_error.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ERR_CLEAR_ERROR 3"
-.TH ERR_CLEAR_ERROR 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ERR_CLEAR_ERROR 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ERR_error_string.3 b/secure/lib/libcrypto/man/man3/ERR_error_string.3
index 728539c2b58d9..bfb4060d29b68 100644
--- a/secure/lib/libcrypto/man/man3/ERR_error_string.3
+++ b/secure/lib/libcrypto/man/man3/ERR_error_string.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ERR_ERROR_STRING 3"
-.TH ERR_ERROR_STRING 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ERR_ERROR_STRING 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ERR_get_error.3 b/secure/lib/libcrypto/man/man3/ERR_get_error.3
index d55b4e4718592..71fb65d6fe79c 100644
--- a/secure/lib/libcrypto/man/man3/ERR_get_error.3
+++ b/secure/lib/libcrypto/man/man3/ERR_get_error.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ERR_GET_ERROR 3"
-.TH ERR_GET_ERROR 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ERR_GET_ERROR 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ERR_load_crypto_strings.3 b/secure/lib/libcrypto/man/man3/ERR_load_crypto_strings.3
index 8936aa76f2b9a..cddd50ed25bca 100644
--- a/secure/lib/libcrypto/man/man3/ERR_load_crypto_strings.3
+++ b/secure/lib/libcrypto/man/man3/ERR_load_crypto_strings.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ERR_LOAD_CRYPTO_STRINGS 3"
-.TH ERR_LOAD_CRYPTO_STRINGS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ERR_LOAD_CRYPTO_STRINGS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ERR_load_strings.3 b/secure/lib/libcrypto/man/man3/ERR_load_strings.3
index a0980ad5c12b6..9ed52571f4872 100644
--- a/secure/lib/libcrypto/man/man3/ERR_load_strings.3
+++ b/secure/lib/libcrypto/man/man3/ERR_load_strings.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ERR_LOAD_STRINGS 3"
-.TH ERR_LOAD_STRINGS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ERR_LOAD_STRINGS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ERR_print_errors.3 b/secure/lib/libcrypto/man/man3/ERR_print_errors.3
index 6edc96eb6a4c2..92986c58f48bb 100644
--- a/secure/lib/libcrypto/man/man3/ERR_print_errors.3
+++ b/secure/lib/libcrypto/man/man3/ERR_print_errors.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ERR_PRINT_ERRORS 3"
-.TH ERR_PRINT_ERRORS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ERR_PRINT_ERRORS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ERR_put_error.3 b/secure/lib/libcrypto/man/man3/ERR_put_error.3
index e0e26bbd7cbe9..7f9f0b5b87c45 100644
--- a/secure/lib/libcrypto/man/man3/ERR_put_error.3
+++ b/secure/lib/libcrypto/man/man3/ERR_put_error.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ERR_PUT_ERROR 3"
-.TH ERR_PUT_ERROR 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ERR_PUT_ERROR 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ERR_remove_state.3 b/secure/lib/libcrypto/man/man3/ERR_remove_state.3
index b70c5d5652f7a..1750ca498f20d 100644
--- a/secure/lib/libcrypto/man/man3/ERR_remove_state.3
+++ b/secure/lib/libcrypto/man/man3/ERR_remove_state.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ERR_REMOVE_STATE 3"
-.TH ERR_REMOVE_STATE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ERR_REMOVE_STATE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/ERR_set_mark.3 b/secure/lib/libcrypto/man/man3/ERR_set_mark.3
index 8edb3254f8815..af0cfc996c4ce 100644
--- a/secure/lib/libcrypto/man/man3/ERR_set_mark.3
+++ b/secure/lib/libcrypto/man/man3/ERR_set_mark.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ERR_SET_MARK 3"
-.TH ERR_SET_MARK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ERR_SET_MARK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_BytesToKey.3 b/secure/lib/libcrypto/man/man3/EVP_BytesToKey.3
index 1b5a33460acb8..4bdefacef7f84 100644
--- a/secure/lib/libcrypto/man/man3/EVP_BytesToKey.3
+++ b/secure/lib/libcrypto/man/man3/EVP_BytesToKey.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_BYTESTOKEY 3"
-.TH EVP_BYTESTOKEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_BYTESTOKEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_CIPHER_CTX_get_cipher_data.3 b/secure/lib/libcrypto/man/man3/EVP_CIPHER_CTX_get_cipher_data.3
index 4941cc7e80c0e..aa4a6fc161bc3 100644
--- a/secure/lib/libcrypto/man/man3/EVP_CIPHER_CTX_get_cipher_data.3
+++ b/secure/lib/libcrypto/man/man3/EVP_CIPHER_CTX_get_cipher_data.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_CIPHER_CTX_GET_CIPHER_DATA 3"
-.TH EVP_CIPHER_CTX_GET_CIPHER_DATA 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_CIPHER_CTX_GET_CIPHER_DATA 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_CIPHER_meth_new.3 b/secure/lib/libcrypto/man/man3/EVP_CIPHER_meth_new.3
index 4da560cc7a79e..253167340c19f 100644
--- a/secure/lib/libcrypto/man/man3/EVP_CIPHER_meth_new.3
+++ b/secure/lib/libcrypto/man/man3/EVP_CIPHER_meth_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_CIPHER_METH_NEW 3"
-.TH EVP_CIPHER_METH_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_CIPHER_METH_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_DigestInit.3 b/secure/lib/libcrypto/man/man3/EVP_DigestInit.3
index 4b9e41e3d316d..090a4d2a7b2fc 100644
--- a/secure/lib/libcrypto/man/man3/EVP_DigestInit.3
+++ b/secure/lib/libcrypto/man/man3/EVP_DigestInit.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,13 +133,13 @@
.\" ========================================================================
.\"
.IX Title "EVP_DIGESTINIT 3"
-.TH EVP_DIGESTINIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_DIGESTINIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-EVP_MD_CTX_new, EVP_MD_CTX_reset, EVP_MD_CTX_free, EVP_MD_CTX_copy_ex, EVP_MD_CTX_ctrl, EVP_MD_CTX_set_flags, EVP_MD_CTX_clear_flags, EVP_MD_CTX_test_flags, EVP_DigestInit_ex, EVP_DigestInit, EVP_DigestUpdate, EVP_DigestFinal_ex, EVP_DigestFinalXOF, EVP_DigestFinal, EVP_MD_CTX_copy, EVP_MD_type, EVP_MD_pkey_type, EVP_MD_size, EVP_MD_block_size, EVP_MD_CTX_md, EVP_MD_CTX_size, EVP_MD_CTX_block_size, EVP_MD_CTX_type, EVP_MD_CTX_md_data, EVP_md_null, EVP_get_digestbyname, EVP_get_digestbynid, EVP_get_digestbyobj, EVP_MD_CTX_set_pkey_ctx \- EVP digest routines
+EVP_MD_CTX_new, EVP_MD_CTX_reset, EVP_MD_CTX_free, EVP_MD_CTX_copy, EVP_MD_CTX_copy_ex, EVP_MD_CTX_ctrl, EVP_MD_CTX_set_flags, EVP_MD_CTX_clear_flags, EVP_MD_CTX_test_flags, EVP_Digest, EVP_DigestInit_ex, EVP_DigestInit, EVP_DigestUpdate, EVP_DigestFinal_ex, EVP_DigestFinalXOF, EVP_DigestFinal, EVP_MD_type, EVP_MD_pkey_type, EVP_MD_size, EVP_MD_block_size, EVP_MD_flags, EVP_MD_CTX_md, EVP_MD_CTX_type, EVP_MD_CTX_size, EVP_MD_CTX_block_size, EVP_MD_CTX_md_data, EVP_MD_CTX_update_fn, EVP_MD_CTX_set_update_fn, EVP_md_null, EVP_get_digestbyname, EVP_get_digestbynid, EVP_get_digestbyobj, EVP_MD_CTX_pkey_ctx, EVP_MD_CTX_set_pkey_ctx \- EVP digest routines
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
@@ -153,6 +153,8 @@ EVP_MD_CTX_new, EVP_MD_CTX_reset, EVP_MD_CTX_free, EVP_MD_CTX_copy_ex, EVP_MD_CT
\& void EVP_MD_CTX_clear_flags(EVP_MD_CTX *ctx, int flags);
\& int EVP_MD_CTX_test_flags(const EVP_MD_CTX *ctx, int flags);
\&
+\& int EVP_Digest(const void *data, size_t count, unsigned char *md,
+\& unsigned int *size, const EVP_MD *type, ENGINE *impl);
\& int EVP_DigestInit_ex(EVP_MD_CTX *ctx, const EVP_MD *type, ENGINE *impl);
\& int EVP_DigestUpdate(EVP_MD_CTX *ctx, const void *d, size_t cnt);
\& int EVP_DigestFinal_ex(EVP_MD_CTX *ctx, unsigned char *md, unsigned int *s);
@@ -169,12 +171,18 @@ EVP_MD_CTX_new, EVP_MD_CTX_reset, EVP_MD_CTX_free, EVP_MD_CTX_copy_ex, EVP_MD_CT
\& int EVP_MD_pkey_type(const EVP_MD *md);
\& int EVP_MD_size(const EVP_MD *md);
\& int EVP_MD_block_size(const EVP_MD *md);
+\& unsigned long EVP_MD_flags(const EVP_MD *md);
\&
\& const EVP_MD *EVP_MD_CTX_md(const EVP_MD_CTX *ctx);
-\& int EVP_MD_CTX_size(const EVP_MD *ctx);
-\& int EVP_MD_CTX_block_size(const EVP_MD *ctx);
-\& int EVP_MD_CTX_type(const EVP_MD *ctx);
+\& int EVP_MD_CTX_size(const EVP_MD_CTX *ctx);
+\& int EVP_MD_CTX_block_size(const EVP_MD_CTX *ctx);
+\& int EVP_MD_CTX_type(const EVP_MD_CTX *ctx);
\& void *EVP_MD_CTX_md_data(const EVP_MD_CTX *ctx);
+\& int (*EVP_MD_CTX_update_fn(EVP_MD_CTX *ctx))(EVP_MD_CTX *ctx,
+\& const void *data, size_t count);
+\& void EVP_MD_CTX_set_update_fn(EVP_MD_CTX *ctx,
+\& int (*update)(EVP_MD_CTX *ctx,
+\& const void *data, size_t count));
\&
\& const EVP_MD *EVP_md_null(void);
\&
@@ -182,6 +190,7 @@ EVP_MD_CTX_new, EVP_MD_CTX_reset, EVP_MD_CTX_free, EVP_MD_CTX_copy_ex, EVP_MD_CT
\& const EVP_MD *EVP_get_digestbynid(int type);
\& const EVP_MD *EVP_get_digestbyobj(const ASN1_OBJECT *o);
\&
+\& EVP_PKEY_CTX *EVP_MD_CTX_pkey_ctx(const EVP_MD_CTX *ctx);
\& void EVP_MD_CTX_set_pkey_ctx(EVP_MD_CTX *ctx, EVP_PKEY_CTX *pctx);
.Ve
.SH "DESCRIPTION"
@@ -200,10 +209,21 @@ existing context.
Cleans up digest context \fBctx\fR and frees up the space allocated to it.
.IP "\fBEVP_MD_CTX_ctrl()\fR" 4
.IX Item "EVP_MD_CTX_ctrl()"
-Performs digest-specific control actions on context \fBctx\fR.
+Performs digest-specific control actions on context \fBctx\fR. The control command
+is indicated in \fBcmd\fR and any additional arguments in \fBp1\fR and \fBp2\fR.
+\&\fBEVP_MD_CTX_ctrl()\fR must be called after \fBEVP_DigestInit_ex()\fR. Other restrictions
+may apply depending on the control type and digest implementation.
+See \*(L"\s-1CONTROLS\*(R"\s0 below for more information.
.IP "\fBEVP_MD_CTX_set_flags()\fR, \fBEVP_MD_CTX_clear_flags()\fR, \fBEVP_MD_CTX_test_flags()\fR" 4
.IX Item "EVP_MD_CTX_set_flags(), EVP_MD_CTX_clear_flags(), EVP_MD_CTX_test_flags()"
Sets, clears and tests \fBctx\fR flags. See \*(L"\s-1FLAGS\*(R"\s0 below for more information.
+.IP "\fBEVP_Digest()\fR" 4
+.IX Item "EVP_Digest()"
+A wrapper around the Digest Init_ex, Update and Final_ex functions.
+Hashes \fBcount\fR bytes of data at \fBdata\fR using a digest \fBtype\fR from \s-1ENGINE\s0
+\&\fBimpl\fR. The digest value is placed in \fBmd\fR and its length is written at \fBsize\fR
+if the pointer is not \s-1NULL.\s0 At most \fB\s-1EVP_MAX_MD_SIZE\s0\fR bytes will be written.
+If \fBimpl\fR is \s-1NULL\s0 the default implementation of digest \fBtype\fR is used.
.IP "\fBEVP_DigestInit_ex()\fR" 4
.IX Item "EVP_DigestInit_ex()"
Sets up digest context \fBctx\fR to use a digest \fBtype\fR from \s-1ENGINE\s0 \fBimpl\fR.
@@ -236,7 +256,7 @@ few bytes.
.IP "\fBEVP_DigestInit()\fR" 4
.IX Item "EVP_DigestInit()"
Behaves in the same way as \fBEVP_DigestInit_ex()\fR except it always uses the
-default digest implementation.
+default digest implementation and calls \fBEVP_MD_CTX_reset()\fR.
.IP "\fBEVP_DigestFinal()\fR" 4
.IX Item "EVP_DigestFinal()"
Similar to \fBEVP_DigestFinal_ex()\fR except the digest context \fBctx\fR is
@@ -266,6 +286,18 @@ The space is allocated by OpenSSL and has the size originally set with
.IP "\fBEVP_MD_CTX_md()\fR" 4
.IX Item "EVP_MD_CTX_md()"
Returns the \fB\s-1EVP_MD\s0\fR structure corresponding to the passed \fB\s-1EVP_MD_CTX\s0\fR.
+.IP "\fBEVP_MD_CTX_set_update_fn()\fR" 4
+.IX Item "EVP_MD_CTX_set_update_fn()"
+Sets the update function for \fBctx\fR to \fBupdate\fR.
+This is the function that is called by EVP_DigestUpdate. If not set, the
+update function from the \fB\s-1EVP_MD\s0\fR type specified at initialization is used.
+.IP "\fBEVP_MD_CTX_update_fn()\fR" 4
+.IX Item "EVP_MD_CTX_update_fn()"
+Returns the update function for \fBctx\fR.
+.IP "\fBEVP_MD_flags()\fR" 4
+.IX Item "EVP_MD_flags()"
+Returns the \fBmd\fR flags. Note that these are different from the \fB\s-1EVP_MD_CTX\s0\fR
+ones. See \fBEVP_MD_meth_set_flags\fR\|(3) for more information.
.IP "\fBEVP_MD_pkey_type()\fR" 4
.IX Item "EVP_MD_pkey_type()"
Returns the \s-1NID\s0 of the public key signing algorithm associated with this
@@ -280,14 +312,32 @@ length.
.IX Item "EVP_get_digestbyname(), EVP_get_digestbynid(), EVP_get_digestbyobj()"
Returns an \fB\s-1EVP_MD\s0\fR structure when passed a digest name, a digest \fB\s-1NID\s0\fR or an
\&\fB\s-1ASN1_OBJECT\s0\fR structure respectively.
+.IP "\fBEVP_MD_CTX_pkey_ctx()\fR" 4
+.IX Item "EVP_MD_CTX_pkey_ctx()"
+Returns the \fB\s-1EVP_PKEY_CTX\s0\fR assigned to \fBctx\fR. The returned pointer should not
+be freed by the caller.
.IP "\fBEVP_MD_CTX_set_pkey_ctx()\fR" 4
.IX Item "EVP_MD_CTX_set_pkey_ctx()"
Assigns an \fB\s-1EVP_PKEY_CTX\s0\fR to \fB\s-1EVP_MD_CTX\s0\fR. This is usually used to provide
-a customzied \fB\s-1EVP_PKEY_CTX\s0\fR to \fBEVP_DigestSignInit\fR\|(3) or
+a customized \fB\s-1EVP_PKEY_CTX\s0\fR to \fBEVP_DigestSignInit\fR\|(3) or
\&\fBEVP_DigestVerifyInit\fR\|(3). The \fBpctx\fR passed to this function should be freed
by the caller. A \s-1NULL\s0 \fBpctx\fR pointer is also allowed to clear the \fB\s-1EVP_PKEY_CTX\s0\fR
assigned to \fBctx\fR. In such case, freeing the cleared \fB\s-1EVP_PKEY_CTX\s0\fR or not
depends on how the \fB\s-1EVP_PKEY_CTX\s0\fR is created.
+.SH "CONTROLS"
+.IX Header "CONTROLS"
+\&\fBEVP_MD_CTX_ctrl()\fR can be used to send the following standard controls:
+.IP "\s-1EVP_MD_CTRL_MICALG\s0" 4
+.IX Item "EVP_MD_CTRL_MICALG"
+Gets the digest Message Integrity Check algorithm string. This is used when
+creating S/MIME multipart/signed messages, as specified in \s-1RFC 3851.\s0
+The string value is written to \fBp2\fR.
+.IP "\s-1EVP_MD_CTRL_XOF_LEN\s0" 4
+.IX Item "EVP_MD_CTRL_XOF_LEN"
+This control sets the digest length for extendable output functions to \fBp1\fR.
+Sending this control directly should not be necessary, the use of
+\&\f(CW\*(C`EVP_DigestFinalXOF()\*(C'\fR is preferred.
+Currently used by \s-1SHAKE.\s0
.SH "FLAGS"
.IX Header "FLAGS"
\&\fBEVP_MD_CTX_set_flags()\fR, \fBEVP_MD_CTX_clear_flags()\fR and \fBEVP_MD_CTX_test_flags()\fR
@@ -317,8 +367,8 @@ Returns 1 if successful or 0 for failure.
.IP "\fBEVP_MD_CTX_copy_ex()\fR" 4
.IX Item "EVP_MD_CTX_copy_ex()"
Returns 1 if successful or 0 for failure.
-.IP "\fBEVP_MD_type()\fR, \fBEVP_MD_pkey_type()\fR, \fBEVP_MD_type()\fR" 4
-.IX Item "EVP_MD_type(), EVP_MD_pkey_type(), EVP_MD_type()"
+.IP "\fBEVP_MD_type()\fR, \fBEVP_MD_pkey_type()\fR" 4
+.IX Item "EVP_MD_type(), EVP_MD_pkey_type()"
Returns the \s-1NID\s0 of the corresponding \s-1OBJECT IDENTIFIER\s0 or NID_undef if none
exists.
.IP "\fBEVP_MD_size()\fR, \fBEVP_MD_block_size()\fR, \fBEVP_MD_CTX_size()\fR, \fBEVP_MD_CTX_block_size()\fR" 4
@@ -409,6 +459,7 @@ digest name passed on the command line.
.Ve
.SH "SEE ALSO"
.IX Header "SEE ALSO"
+\&\fBEVP_MD_meth_new\fR\|(3),
\&\fBdgst\fR\|(1),
\&\fBevp\fR\|(7)
.PP
diff --git a/secure/lib/libcrypto/man/man3/EVP_DigestSignInit.3 b/secure/lib/libcrypto/man/man3/EVP_DigestSignInit.3
index 0524fd067ddd1..741f4457062b6 100644
--- a/secure/lib/libcrypto/man/man3/EVP_DigestSignInit.3
+++ b/secure/lib/libcrypto/man/man3/EVP_DigestSignInit.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_DIGESTSIGNINIT 3"
-.TH EVP_DIGESTSIGNINIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_DIGESTSIGNINIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -172,7 +172,7 @@ inside \fBEVP_DigestSignInit()\fR and it will be freed automatically when the
The digest \fBtype\fR may be \s-1NULL\s0 if the signing algorithm supports it.
.PP
No \fB\s-1EVP_PKEY_CTX\s0\fR will be created by \fBEVP_DigestSignInit()\fR if the passed \fBctx\fR
-has already been assigned one via \fBEVP_MD_CTX_set_ctx\fR\|(3). See also \s-1\fBSM2\s0\fR\|(7).
+has already been assigned one via \fBEVP_MD_CTX_set_pkey_ctx\fR\|(3). See also \s-1\fBSM2\s0\fR\|(7).
.PP
Only \s-1EVP_PKEY\s0 types that support signing can be used with these functions. This
includes \s-1MAC\s0 algorithms where the \s-1MAC\s0 generation is considered as a form of
@@ -225,10 +225,8 @@ signature in \fBsig\fR and its length in \fBsiglen\fR in a similar way to
\&\fBEVP_DigestSignFinal()\fR.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
-\&\fBEVP_DigestSignInit()\fR, \fBEVP_DigestSignUpdate()\fR, \fBEVP_DigestSignaFinal()\fR and
-\&\fBEVP_DigestSign()\fR return 1 for success and 0 or a negative value for failure. In
-particular, a return value of \-2 indicates the operation is not supported by the
-public key algorithm.
+\&\fBEVP_DigestSignInit()\fR, \fBEVP_DigestSignUpdate()\fR, \fBEVP_DigestSignFinal()\fR and
+\&\fBEVP_DigestSign()\fR return 1 for success and 0 for failure.
.PP
The error codes can be obtained from \fBERR_get_error\fR\|(3).
.SH "NOTES"
@@ -277,7 +275,7 @@ which indicates the maximum possible signature for any set of parameters.
were added in OpenSSL 1.0.0.
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2006\-2019 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2006\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/EVP_DigestVerifyInit.3 b/secure/lib/libcrypto/man/man3/EVP_DigestVerifyInit.3
index c303fd85df81c..4eba134ad722c 100644
--- a/secure/lib/libcrypto/man/man3/EVP_DigestVerifyInit.3
+++ b/secure/lib/libcrypto/man/man3/EVP_DigestVerifyInit.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_DIGESTVERIFYINIT 3"
-.TH EVP_DIGESTVERIFYINIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_DIGESTVERIFYINIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -169,7 +169,7 @@ inside \fBEVP_DigestVerifyInit()\fR and it will be freed automatically when the
\&\s-1EVP_MD_CTX\s0 is freed).
.PP
No \fB\s-1EVP_PKEY_CTX\s0\fR will be created by \fBEVP_DigestSignInit()\fR if the passed \fBctx\fR
-has already been assigned one via \fBEVP_MD_CTX_set_ctx\fR\|(3). See also \s-1\fBSM2\s0\fR\|(7).
+has already been assigned one via \fBEVP_MD_CTX_set_pkey_ctx\fR\|(3). See also \s-1\fBSM2\s0\fR\|(7).
.PP
\&\fBEVP_DigestVerifyUpdate()\fR hashes \fBcnt\fR bytes of data at \fBd\fR into the
verification context \fBctx\fR. This function can be called several times on the
diff --git a/secure/lib/libcrypto/man/man3/EVP_EncodeInit.3 b/secure/lib/libcrypto/man/man3/EVP_EncodeInit.3
index f23ef7efaf0c2..f6b7cef440ed0 100644
--- a/secure/lib/libcrypto/man/man3/EVP_EncodeInit.3
+++ b/secure/lib/libcrypto/man/man3/EVP_EncodeInit.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_ENCODEINIT 3"
-.TH EVP_ENCODEINIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_ENCODEINIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_EncryptInit.3 b/secure/lib/libcrypto/man/man3/EVP_EncryptInit.3
index f803c2de7549b..ae006cf57d1dc 100644
--- a/secure/lib/libcrypto/man/man3/EVP_EncryptInit.3
+++ b/secure/lib/libcrypto/man/man3/EVP_EncryptInit.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_ENCRYPTINIT 3"
-.TH EVP_ENCRYPTINIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_ENCRYPTINIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -550,6 +550,15 @@ compatibility with existing code. New code should use \fBEVP_EncryptInit_ex()\fR
\&\fBEVP_CipherInit_ex()\fR and \fBEVP_CipherFinal_ex()\fR because they can reuse an
existing context without allocating and freeing it up on each call.
.PP
+There are some differences between functions \fBEVP_CipherInit()\fR and
+\&\fBEVP_CipherInit_ex()\fR, significant in some circumstances. \fBEVP_CipherInit()\fR fills
+the passed context object with zeros. As a consequence, \fBEVP_CipherInit()\fR does
+not allow step-by-step initialization of the ctx when the \fIkey\fR and \fIiv\fR are
+passed in separate calls. It also means that the flags set for the \s-1CTX\s0 are
+removed, and it is especially important for the
+\&\fB\s-1EVP_CIPHER_CTX_FLAG_WRAP_ALLOW\s0\fR flag treated specially in
+\&\fBEVP_CipherInit_ex()\fR.
+.PP
\&\fBEVP_get_cipherbynid()\fR, and \fBEVP_get_cipherbyobj()\fR are implemented as macros.
.SH "BUGS"
.IX Header "BUGS"
diff --git a/secure/lib/libcrypto/man/man3/EVP_MD_meth_new.3 b/secure/lib/libcrypto/man/man3/EVP_MD_meth_new.3
index f472806291926..6996c6bee7c49 100644
--- a/secure/lib/libcrypto/man/man3/EVP_MD_meth_new.3
+++ b/secure/lib/libcrypto/man/man3/EVP_MD_meth_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_MD_METH_NEW 3"
-.TH EVP_MD_METH_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_MD_METH_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -208,7 +208,11 @@ behaviours in the particular \fBmd\fR. Several flags can be or'd
together. The available flags are:
.IP "\s-1EVP_MD_FLAG_ONESHOT\s0" 4
.IX Item "EVP_MD_FLAG_ONESHOT"
-This digest method can only handles one block of input.
+This digest method can only handle one block of input.
+.IP "\s-1EVP_MD_FLAG_XOF\s0" 4
+.IX Item "EVP_MD_FLAG_XOF"
+This digest method is an extensible-output function (\s-1XOF\s0) and supports
+the \fB\s-1EVP_MD_CTRL_XOF_LEN\s0\fR control.
.IP "\s-1EVP_MD_FLAG_DIGALGID_NULL\s0" 4
.IX Item "EVP_MD_FLAG_DIGALGID_NULL"
When setting up a DigestAlgorithmIdentifier, this flag will have the
@@ -225,18 +229,22 @@ Custom DigestAlgorithmIdentifier handling via ctrl, with
\&\fB\s-1EVP_MD_FLAG_DIGALGID_ABSENT\s0\fR as default. \fINote: if combined with
\&\s-1EVP_MD_FLAG_DIGALGID_NULL,\s0 the latter will be overridden.\fR
Currently unused.
+.IP "\s-1EVP_MD_FLAG_FIPS\s0" 4
+.IX Item "EVP_MD_FLAG_FIPS"
+This digest method is suitable for use in \s-1FIPS\s0 mode.
+Currently unused.
.PP
\&\fBEVP_MD_meth_set_init()\fR sets the digest init function for \fBmd\fR.
-The digest init function is called by \fBEVP_DigestInit()\fR,
+The digest init function is called by \fBEVP_Digest()\fR, \fBEVP_DigestInit()\fR,
\&\fBEVP_DigestInit_ex()\fR, EVP_SignInit, \fBEVP_SignInit_ex()\fR, \fBEVP_VerifyInit()\fR
and \fBEVP_VerifyInit_ex()\fR.
.PP
\&\fBEVP_MD_meth_set_update()\fR sets the digest update function for \fBmd\fR.
-The digest update function is called by \fBEVP_DigestUpdate()\fR,
+The digest update function is called by \fBEVP_Digest()\fR, \fBEVP_DigestUpdate()\fR and
\&\fBEVP_SignUpdate()\fR.
.PP
\&\fBEVP_MD_meth_set_final()\fR sets the digest final function for \fBmd\fR.
-The digest final function is called by \fBEVP_DigestFinal()\fR,
+The digest final function is called by \fBEVP_Digest()\fR, \fBEVP_DigestFinal()\fR,
\&\fBEVP_DigestFinal_ex()\fR, \fBEVP_SignFinal()\fR and \fBEVP_VerifyFinal()\fR.
.PP
\&\fBEVP_MD_meth_set_copy()\fR sets the function for \fBmd\fR to do extra
@@ -257,6 +265,7 @@ This cleanup function is called by \fBEVP_MD_CTX_reset()\fR and
\&\fBEVP_MD_CTX_free()\fR.
.PP
\&\fBEVP_MD_meth_set_ctrl()\fR sets the control function for \fBmd\fR.
+See \fBEVP_MD_CTX_ctrl\fR\|(3) for the available controls.
.PP
\&\fBEVP_MD_meth_get_input_blocksize()\fR, \fBEVP_MD_meth_get_result_size()\fR,
\&\fBEVP_MD_meth_get_app_datasize()\fR, \fBEVP_MD_meth_get_flags()\fR,
@@ -284,7 +293,7 @@ The \fB\s-1EVP_MD\s0\fR structure was openly available in OpenSSL before version
1.1. The functions described here were added in OpenSSL 1.1.
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2015\-2017 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2015\-2018 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/EVP_OpenInit.3 b/secure/lib/libcrypto/man/man3/EVP_OpenInit.3
index e67fe0dc5fe47..ac0b6e2977d71 100644
--- a/secure/lib/libcrypto/man/man3/EVP_OpenInit.3
+++ b/secure/lib/libcrypto/man/man3/EVP_OpenInit.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_OPENINIT 3"
-.TH EVP_OPENINIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_OPENINIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_ASN1_METHOD.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_ASN1_METHOD.3
index 5eb33834e0912..43e2583e19af2 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_ASN1_METHOD.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_ASN1_METHOD.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_ASN1_METHOD 3"
-.TH EVP_PKEY_ASN1_METHOD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_ASN1_METHOD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_ctrl.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_ctrl.3
index c580f29e51cce..6d6770b3b52f2 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_ctrl.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_ctrl.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,13 +133,13 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_CTX_CTRL 3"
-.TH EVP_PKEY_CTX_CTRL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_CTX_CTRL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-EVP_PKEY_CTX_ctrl, EVP_PKEY_CTX_ctrl_str, EVP_PKEY_CTX_ctrl_uint64, EVP_PKEY_CTX_md, EVP_PKEY_CTX_set_signature_md, EVP_PKEY_CTX_get_signature_md, EVP_PKEY_CTX_set_mac_key, EVP_PKEY_CTX_set_rsa_padding, EVP_PKEY_CTX_get_rsa_padding, EVP_PKEY_CTX_set_rsa_pss_saltlen, EVP_PKEY_CTX_get_rsa_pss_saltlen, EVP_PKEY_CTX_set_rsa_keygen_bits, EVP_PKEY_CTX_set_rsa_keygen_pubexp, EVP_PKEY_CTX_set_rsa_keygen_primes, EVP_PKEY_CTX_set_rsa_mgf1_md, EVP_PKEY_CTX_get_rsa_mgf1_md, EVP_PKEY_CTX_set_rsa_oaep_md, EVP_PKEY_CTX_get_rsa_oaep_md, EVP_PKEY_CTX_set0_rsa_oaep_label, EVP_PKEY_CTX_get0_rsa_oaep_label, EVP_PKEY_CTX_set_dsa_paramgen_bits, EVP_PKEY_CTX_set_dh_paramgen_prime_len, EVP_PKEY_CTX_set_dh_paramgen_subprime_len, EVP_PKEY_CTX_set_dh_paramgen_generator, EVP_PKEY_CTX_set_dh_paramgen_type, EVP_PKEY_CTX_set_dh_rfc5114, EVP_PKEY_CTX_set_dhx_rfc5114, EVP_PKEY_CTX_set_dh_pad, EVP_PKEY_CTX_set_dh_nid, EVP_PKEY_CTX_set_dh_kdf_type, EVP_PKEY_CTX_get_dh_kdf_type, EVP_PKEY_CTX_set0_dh_kdf_oid, EVP_PKEY_CTX_get0_dh_kdf_oid, EVP_PKEY_CTX_set_dh_kdf_md, EVP_PKEY_CTX_get_dh_kdf_md, EVP_PKEY_CTX_set_dh_kdf_outlen, EVP_PKEY_CTX_get_dh_kdf_outlen, EVP_PKEY_CTX_set0_dh_kdf_ukm, EVP_PKEY_CTX_get0_dh_kdf_ukm, EVP_PKEY_CTX_set_ec_paramgen_curve_nid, EVP_PKEY_CTX_set_ec_param_enc, EVP_PKEY_CTX_set_ecdh_cofactor_mode, EVP_PKEY_CTX_get_ecdh_cofactor_mode, EVP_PKEY_CTX_set_ecdh_kdf_type, EVP_PKEY_CTX_get_ecdh_kdf_type, EVP_PKEY_CTX_set_ecdh_kdf_md, EVP_PKEY_CTX_get_ecdh_kdf_md, EVP_PKEY_CTX_set_ecdh_kdf_outlen, EVP_PKEY_CTX_get_ecdh_kdf_outlen, EVP_PKEY_CTX_set0_ecdh_kdf_ukm, EVP_PKEY_CTX_get0_ecdh_kdf_ukm, EVP_PKEY_CTX_set1_id, EVP_PKEY_CTX_get1_id, EVP_PKEY_CTX_get1_id_len \&\- algorithm specific control operations
+EVP_PKEY_CTX_ctrl, EVP_PKEY_CTX_ctrl_str, EVP_PKEY_CTX_ctrl_uint64, EVP_PKEY_CTX_md, EVP_PKEY_CTX_set_signature_md, EVP_PKEY_CTX_get_signature_md, EVP_PKEY_CTX_set_mac_key, EVP_PKEY_CTX_set_rsa_padding, EVP_PKEY_CTX_get_rsa_padding, EVP_PKEY_CTX_set_rsa_pss_saltlen, EVP_PKEY_CTX_get_rsa_pss_saltlen, EVP_PKEY_CTX_set_rsa_keygen_bits, EVP_PKEY_CTX_set_rsa_keygen_pubexp, EVP_PKEY_CTX_set_rsa_keygen_primes, EVP_PKEY_CTX_set_rsa_mgf1_md, EVP_PKEY_CTX_get_rsa_mgf1_md, EVP_PKEY_CTX_set_rsa_oaep_md, EVP_PKEY_CTX_get_rsa_oaep_md, EVP_PKEY_CTX_set0_rsa_oaep_label, EVP_PKEY_CTX_get0_rsa_oaep_label, EVP_PKEY_CTX_set_dsa_paramgen_bits, EVP_PKEY_CTX_set_dsa_paramgen_q_bits, EVP_PKEY_CTX_set_dsa_paramgen_md, EVP_PKEY_CTX_set_dh_paramgen_prime_len, EVP_PKEY_CTX_set_dh_paramgen_subprime_len, EVP_PKEY_CTX_set_dh_paramgen_generator, EVP_PKEY_CTX_set_dh_paramgen_type, EVP_PKEY_CTX_set_dh_rfc5114, EVP_PKEY_CTX_set_dhx_rfc5114, EVP_PKEY_CTX_set_dh_pad, EVP_PKEY_CTX_set_dh_nid, EVP_PKEY_CTX_set_dh_kdf_type, EVP_PKEY_CTX_get_dh_kdf_type, EVP_PKEY_CTX_set0_dh_kdf_oid, EVP_PKEY_CTX_get0_dh_kdf_oid, EVP_PKEY_CTX_set_dh_kdf_md, EVP_PKEY_CTX_get_dh_kdf_md, EVP_PKEY_CTX_set_dh_kdf_outlen, EVP_PKEY_CTX_get_dh_kdf_outlen, EVP_PKEY_CTX_set0_dh_kdf_ukm, EVP_PKEY_CTX_get0_dh_kdf_ukm, EVP_PKEY_CTX_set_ec_paramgen_curve_nid, EVP_PKEY_CTX_set_ec_param_enc, EVP_PKEY_CTX_set_ecdh_cofactor_mode, EVP_PKEY_CTX_get_ecdh_cofactor_mode, EVP_PKEY_CTX_set_ecdh_kdf_type, EVP_PKEY_CTX_get_ecdh_kdf_type, EVP_PKEY_CTX_set_ecdh_kdf_md, EVP_PKEY_CTX_get_ecdh_kdf_md, EVP_PKEY_CTX_set_ecdh_kdf_outlen, EVP_PKEY_CTX_get_ecdh_kdf_outlen, EVP_PKEY_CTX_set0_ecdh_kdf_ukm, EVP_PKEY_CTX_get0_ecdh_kdf_ukm, EVP_PKEY_CTX_set1_id, EVP_PKEY_CTX_get1_id, EVP_PKEY_CTX_get1_id_len \&\- algorithm specific control operations
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
@@ -178,6 +178,8 @@ EVP_PKEY_CTX_ctrl, EVP_PKEY_CTX_ctrl_str, EVP_PKEY_CTX_ctrl_uint64, EVP_PKEY_CTX
\& #include <openssl/dsa.h>
\&
\& int EVP_PKEY_CTX_set_dsa_paramgen_bits(EVP_PKEY_CTX *ctx, int nbits);
+\& int EVP_PKEY_CTX_set_dsa_paramgen_q_bits(EVP_PKEY_CTX *ctx, int qbits);
+\& int EVP_PKEY_CTX_set_dsa_paramgen_md(EVP_PKEY_CTX *ctx, const EVP_MD *md);
\&
\& #include <openssl/dh.h>
\&
@@ -338,7 +340,17 @@ by the library and should not be freed by the caller.
.SS "\s-1DSA\s0 parameters"
.IX Subsection "DSA parameters"
The \fBEVP_PKEY_CTX_set_dsa_paramgen_bits()\fR macro sets the number of bits used
-for \s-1DSA\s0 parameter generation to \fBbits\fR. If not specified 1024 is used.
+for \s-1DSA\s0 parameter generation to \fBnbits\fR. If not specified, 1024 is used.
+.PP
+The \fBEVP_PKEY_CTX_set_dsa_paramgen_q_bits()\fR macro sets the number of bits in the
+subprime parameter \fBq\fR for \s-1DSA\s0 parameter generation to \fBqbits\fR. If not
+specified, 160 is used. If a digest function is specified below, this parameter
+is ignored and instead, the number of bits in \fBq\fR matches the size of the
+digest.
+.PP
+The \fBEVP_PKEY_CTX_set_dsa_paramgen_md()\fR macro sets the digest function used for
+\&\s-1DSA\s0 parameter generation to \fBmd\fR. If not specified, one of \s-1SHA\-1, SHA\-224,\s0 or
+\&\s-1SHA\-256\s0 is selected to match the bit length of \fBq\fR above.
.SS "\s-1DH\s0 parameters"
.IX Subsection "DH parameters"
The \fBEVP_PKEY_CTX_set_dh_paramgen_prime_len()\fR macro sets the length of the \s-1DH\s0
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_new.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_new.3
index cd32f4574c5ba..61779c83e5ecd 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_new.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_CTX_NEW 3"
-.TH EVP_PKEY_CTX_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_CTX_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set1_pbe_pass.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set1_pbe_pass.3
index 77577bc356ff2..e4f93c5202016 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set1_pbe_pass.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set1_pbe_pass.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_CTX_SET1_PBE_PASS 3"
-.TH EVP_PKEY_CTX_SET1_PBE_PASS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_CTX_SET1_PBE_PASS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_hkdf_md.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_hkdf_md.3
index 15795d6ff5450..7160cb1c46bf3 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_hkdf_md.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_hkdf_md.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_CTX_SET_HKDF_MD 3"
-.TH EVP_PKEY_CTX_SET_HKDF_MD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_CTX_SET_HKDF_MD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_rsa_pss_keygen_md.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_rsa_pss_keygen_md.3
index 939d29cac16b8..be0ea1d2b8764 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_rsa_pss_keygen_md.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_rsa_pss_keygen_md.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_CTX_SET_RSA_PSS_KEYGEN_MD 3"
-.TH EVP_PKEY_CTX_SET_RSA_PSS_KEYGEN_MD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_CTX_SET_RSA_PSS_KEYGEN_MD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_scrypt_N.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_scrypt_N.3
index 0794685ff34e2..e65dcf3dbe3bb 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_scrypt_N.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_scrypt_N.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_CTX_SET_SCRYPT_N 3"
-.TH EVP_PKEY_CTX_SET_SCRYPT_N 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_CTX_SET_SCRYPT_N 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_tls1_prf_md.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_tls1_prf_md.3
index 7d09f0d1ab800..92b83c7ac3aae 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_tls1_prf_md.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_CTX_set_tls1_prf_md.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_CTX_SET_TLS1_PRF_MD 3"
-.TH EVP_PKEY_CTX_SET_TLS1_PRF_MD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_CTX_SET_TLS1_PRF_MD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_asn1_get_count.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_asn1_get_count.3
index 9dbb17aa8dae8..29e2ee77f2586 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_asn1_get_count.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_asn1_get_count.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_ASN1_GET_COUNT 3"
-.TH EVP_PKEY_ASN1_GET_COUNT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_ASN1_GET_COUNT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_cmp.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_cmp.3
index 6af3afbcc5ab4..21607010bf4c5 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_cmp.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_cmp.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_CMP 3"
-.TH EVP_PKEY_CMP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_CMP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_decrypt.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_decrypt.3
index faa00d9d67748..4abb2ce8ea16e 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_decrypt.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_decrypt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_DECRYPT 3"
-.TH EVP_PKEY_DECRYPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_DECRYPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_derive.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_derive.3
index 2aa43c002a60e..21a472f8552f7 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_derive.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_derive.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_DERIVE 3"
-.TH EVP_PKEY_DERIVE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_DERIVE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_encrypt.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_encrypt.3
index bdf8d35b7e0bb..e4fc2b4cd86a4 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_encrypt.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_encrypt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_ENCRYPT 3"
-.TH EVP_PKEY_ENCRYPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_ENCRYPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_get_default_digest_nid.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_get_default_digest_nid.3
index 777d60243707c..09af3d0813481 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_get_default_digest_nid.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_get_default_digest_nid.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_GET_DEFAULT_DIGEST_NID 3"
-.TH EVP_PKEY_GET_DEFAULT_DIGEST_NID 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_GET_DEFAULT_DIGEST_NID 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_keygen.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_keygen.3
index d323f2a038f09..d23b679165bfe 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_keygen.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_keygen.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_KEYGEN 3"
-.TH EVP_PKEY_KEYGEN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_KEYGEN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_meth_get_count.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_meth_get_count.3
index 0b9e750b16baa..04419b8dff2c9 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_meth_get_count.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_meth_get_count.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_METH_GET_COUNT 3"
-.TH EVP_PKEY_METH_GET_COUNT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_METH_GET_COUNT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_meth_new.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_meth_new.3
index 465a3499ed7aa..a7ee1c9069a41 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_meth_new.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_meth_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,13 +133,13 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_METH_NEW 3"
-.TH EVP_PKEY_METH_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_METH_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-EVP_PKEY_meth_new, EVP_PKEY_meth_free, EVP_PKEY_meth_copy, EVP_PKEY_meth_find, EVP_PKEY_meth_add0, EVP_PKEY_METHOD, EVP_PKEY_meth_set_init, EVP_PKEY_meth_set_copy, EVP_PKEY_meth_set_cleanup, EVP_PKEY_meth_set_paramgen, EVP_PKEY_meth_set_keygen, EVP_PKEY_meth_set_sign, EVP_PKEY_meth_set_verify, EVP_PKEY_meth_set_verify_recover, EVP_PKEY_meth_set_signctx, EVP_PKEY_meth_set_verifyctx, EVP_PKEY_meth_set_encrypt, EVP_PKEY_meth_set_decrypt, EVP_PKEY_meth_set_derive, EVP_PKEY_meth_set_ctrl, EVP_PKEY_meth_set_check, EVP_PKEY_meth_set_public_check, EVP_PKEY_meth_set_param_check, EVP_PKEY_meth_set_digest_custom, EVP_PKEY_meth_get_init, EVP_PKEY_meth_get_copy, EVP_PKEY_meth_get_cleanup, EVP_PKEY_meth_get_paramgen, EVP_PKEY_meth_get_keygen, EVP_PKEY_meth_get_sign, EVP_PKEY_meth_get_verify, EVP_PKEY_meth_get_verify_recover, EVP_PKEY_meth_get_signctx, EVP_PKEY_meth_get_verifyctx, EVP_PKEY_meth_get_encrypt, EVP_PKEY_meth_get_decrypt, EVP_PKEY_meth_get_derive, EVP_PKEY_meth_get_ctrl, EVP_PKEY_meth_get_check, EVP_PKEY_meth_get_public_check, EVP_PKEY_meth_get_param_check, EVP_PKEY_meth_get_digest_custom, EVP_PKEY_meth_remove \&\- manipulating EVP_PKEY_METHOD structure
+EVP_PKEY_meth_new, EVP_PKEY_meth_free, EVP_PKEY_meth_copy, EVP_PKEY_meth_find, EVP_PKEY_meth_add0, EVP_PKEY_METHOD, EVP_PKEY_meth_set_init, EVP_PKEY_meth_set_copy, EVP_PKEY_meth_set_cleanup, EVP_PKEY_meth_set_paramgen, EVP_PKEY_meth_set_keygen, EVP_PKEY_meth_set_sign, EVP_PKEY_meth_set_verify, EVP_PKEY_meth_set_verify_recover, EVP_PKEY_meth_set_signctx, EVP_PKEY_meth_set_verifyctx, EVP_PKEY_meth_set_encrypt, EVP_PKEY_meth_set_decrypt, EVP_PKEY_meth_set_derive, EVP_PKEY_meth_set_ctrl, EVP_PKEY_meth_set_digestsign, EVP_PKEY_meth_set_digestverify, EVP_PKEY_meth_set_check, EVP_PKEY_meth_set_public_check, EVP_PKEY_meth_set_param_check, EVP_PKEY_meth_set_digest_custom, EVP_PKEY_meth_get_init, EVP_PKEY_meth_get_copy, EVP_PKEY_meth_get_cleanup, EVP_PKEY_meth_get_paramgen, EVP_PKEY_meth_get_keygen, EVP_PKEY_meth_get_sign, EVP_PKEY_meth_get_verify, EVP_PKEY_meth_get_verify_recover, EVP_PKEY_meth_get_signctx, EVP_PKEY_meth_get_verifyctx, EVP_PKEY_meth_get_encrypt, EVP_PKEY_meth_get_decrypt, EVP_PKEY_meth_get_derive, EVP_PKEY_meth_get_ctrl, EVP_PKEY_meth_get_digestsign, EVP_PKEY_meth_get_digestverify, EVP_PKEY_meth_get_check, EVP_PKEY_meth_get_public_check, EVP_PKEY_meth_get_param_check, EVP_PKEY_meth_get_digest_custom, EVP_PKEY_meth_remove \&\- manipulating EVP_PKEY_METHOD structure
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
@@ -232,6 +232,18 @@ EVP_PKEY_meth_new, EVP_PKEY_meth_free, EVP_PKEY_meth_copy, EVP_PKEY_meth_find, E
\& int (*ctrl_str) (EVP_PKEY_CTX *ctx,
\& const char *type,
\& const char *value));
+\& void EVP_PKEY_meth_set_digestsign(EVP_PKEY_METHOD *pmeth,
+\& int (*digestsign) (EVP_MD_CTX *ctx,
+\& unsigned char *sig,
+\& size_t *siglen,
+\& const unsigned char *tbs,
+\& size_t tbslen));
+\& void EVP_PKEY_meth_set_digestverify(EVP_PKEY_METHOD *pmeth,
+\& int (*digestverify) (EVP_MD_CTX *ctx,
+\& const unsigned char *sig,
+\& size_t siglen,
+\& const unsigned char *tbs,
+\& size_t tbslen));
\& void EVP_PKEY_meth_set_check(EVP_PKEY_METHOD *pmeth,
\& int (*check) (EVP_PKEY *pkey));
\& void EVP_PKEY_meth_set_public_check(EVP_PKEY_METHOD *pmeth,
@@ -320,6 +332,18 @@ EVP_PKEY_meth_new, EVP_PKEY_meth_free, EVP_PKEY_meth_copy, EVP_PKEY_meth_find, E
\& int (**pctrl_str) (EVP_PKEY_CTX *ctx,
\& const char *type,
\& const char *value));
+\& void EVP_PKEY_meth_get_digestsign(EVP_PKEY_METHOD *pmeth,
+\& int (**digestsign) (EVP_MD_CTX *ctx,
+\& unsigned char *sig,
+\& size_t *siglen,
+\& const unsigned char *tbs,
+\& size_t tbslen));
+\& void EVP_PKEY_meth_get_digestverify(EVP_PKEY_METHOD *pmeth,
+\& int (**digestverify) (EVP_MD_CTX *ctx,
+\& const unsigned char *sig,
+\& size_t siglen,
+\& const unsigned char *tbs,
+\& size_t tbslen));
\& void EVP_PKEY_meth_get_check(const EVP_PKEY_METHOD *pmeth,
\& int (**pcheck) (EVP_PKEY *pkey));
\& void EVP_PKEY_meth_get_public_check(const EVP_PKEY_METHOD *pmeth,
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_new.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_new.3
index 708dac7c7a2f1..59027734f7e17 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_new.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_NEW 3"
-.TH EVP_PKEY_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -200,21 +200,24 @@ creation of a \s-1CMAC\s0 in the \fBcipher\fR argument.
New applications should use \fBEVP_PKEY_new_raw_private_key()\fR instead.
.PP
\&\fBEVP_PKEY_get_raw_private_key()\fR fills the buffer provided by \fBpriv\fR with raw
-private key data. The number of bytes written is populated in \fB*len\fR. If the
-buffer \fBpriv\fR is \s-1NULL\s0 then \fB*len\fR is populated with the number of bytes
-required to hold the key. The calling application is responsible for ensuring
-that the buffer is large enough to receive the private key data. This function
-only works for algorithms that support raw private keys. Currently this is:
-\&\fB\s-1EVP_PKEY_HMAC\s0\fR, \fB\s-1EVP_PKEY_POLY1305\s0\fR, \fB\s-1EVP_PKEY_SIPHASH\s0\fR, \fB\s-1EVP_PKEY_X25519\s0\fR,
-\&\fB\s-1EVP_PKEY_ED25519\s0\fR, \fB\s-1EVP_PKEY_X448\s0\fR or \fB\s-1EVP_PKEY_ED448\s0\fR.
+private key data. The size of the \fBpriv\fR buffer should be in \fB*len\fR on entry
+to the function, and on exit \fB*len\fR is updated with the number of bytes
+actually written. If the buffer \fBpriv\fR is \s-1NULL\s0 then \fB*len\fR is populated with
+the number of bytes required to hold the key. The calling application is
+responsible for ensuring that the buffer is large enough to receive the private
+key data. This function only works for algorithms that support raw private keys.
+Currently this is: \fB\s-1EVP_PKEY_HMAC\s0\fR, \fB\s-1EVP_PKEY_POLY1305\s0\fR, \fB\s-1EVP_PKEY_SIPHASH\s0\fR,
+\&\fB\s-1EVP_PKEY_X25519\s0\fR, \fB\s-1EVP_PKEY_ED25519\s0\fR, \fB\s-1EVP_PKEY_X448\s0\fR or \fB\s-1EVP_PKEY_ED448\s0\fR.
.PP
\&\fBEVP_PKEY_get_raw_public_key()\fR fills the buffer provided by \fBpub\fR with raw
-public key data. The number of bytes written is populated in \fB*len\fR. If the
-buffer \fBpub\fR is \s-1NULL\s0 then \fB*len\fR is populated with the number of bytes
-required to hold the key. The calling application is responsible for ensuring
-that the buffer is large enough to receive the public key data. This function
-only works for algorithms that support raw public keys. Currently this is:
-\&\fB\s-1EVP_PKEY_X25519\s0\fR, \fB\s-1EVP_PKEY_ED25519\s0\fR, \fB\s-1EVP_PKEY_X448\s0\fR or \fB\s-1EVP_PKEY_ED448\s0\fR.
+public key data. The size of the \fBpub\fR buffer should be in \fB*len\fR on entry
+to the function, and on exit \fB*len\fR is updated with the number of bytes
+actually written. If the buffer \fBpub\fR is \s-1NULL\s0 then \fB*len\fR is populated with
+the number of bytes required to hold the key. The calling application is
+responsible for ensuring that the buffer is large enough to receive the public
+key data. This function only works for algorithms that support raw public keys.
+Currently this is: \fB\s-1EVP_PKEY_X25519\s0\fR, \fB\s-1EVP_PKEY_ED25519\s0\fR, \fB\s-1EVP_PKEY_X448\s0\fR or
+\&\fB\s-1EVP_PKEY_ED448\s0\fR.
.SH "NOTES"
.IX Header "NOTES"
The \fB\s-1EVP_PKEY\s0\fR structure is used by various OpenSSL functions which require a
@@ -249,7 +252,7 @@ The
\&\fBEVP_PKEY_get_raw_public_key()\fR functions were added in OpenSSL 1.1.1.
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2002\-2018 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2002\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_print_private.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_print_private.3
index b8d33cc738920..95e6aee6d28c6 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_print_private.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_print_private.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_PRINT_PRIVATE 3"
-.TH EVP_PKEY_PRINT_PRIVATE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_PRINT_PRIVATE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_set1_RSA.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_set1_RSA.3
index 9d4c4670e4e86..f0b08964dcd78 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_set1_RSA.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_set1_RSA.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_SET1_RSA 3"
-.TH EVP_PKEY_SET1_RSA 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_SET1_RSA 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_sign.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_sign.3
index 8887fb79080e5..46406681d670e 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_sign.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_sign.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_SIGN 3"
-.TH EVP_PKEY_SIGN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_SIGN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_size.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_size.3
new file mode 100644
index 0000000000000..b58861eee8989
--- /dev/null
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_size.3
@@ -0,0 +1,210 @@
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
+.\"
+.\" Standard preamble:
+.\" ========================================================================
+.de Sp \" Vertical space (when we can't use .PP)
+.if t .sp .5v
+.if n .sp
+..
+.de Vb \" Begin verbatim text
+.ft CW
+.nf
+.ne \\$1
+..
+.de Ve \" End verbatim text
+.ft R
+.fi
+..
+.\" Set up some character translations and predefined strings. \*(-- will
+.\" give an unbreakable dash, \*(PI will give pi, \*(L" will give a left
+.\" double quote, and \*(R" will give a right double quote. \*(C+ will
+.\" give a nicer C++. Capital omega is used to do unbreakable dashes and
+.\" therefore won't be available. \*(C` and \*(C' expand to `' in nroff,
+.\" nothing in troff, for use with C<>.
+.tr \(*W-
+.ds C+ C\v'-.1v'\h'-1p'\s-2+\h'-1p'+\s0\v'.1v'\h'-1p'
+.ie n \{\
+. ds -- \(*W-
+. ds PI pi
+. if (\n(.H=4u)&(1m=24u) .ds -- \(*W\h'-12u'\(*W\h'-12u'-\" diablo 10 pitch
+. if (\n(.H=4u)&(1m=20u) .ds -- \(*W\h'-12u'\(*W\h'-8u'-\" diablo 12 pitch
+. ds L" ""
+. ds R" ""
+. ds C` ""
+. ds C' ""
+'br\}
+.el\{\
+. ds -- \|\(em\|
+. ds PI \(*p
+. ds L" ``
+. ds R" ''
+. ds C`
+. ds C'
+'br\}
+.\"
+.\" Escape single quotes in literal strings from groff's Unicode transform.
+.ie \n(.g .ds Aq \(aq
+.el .ds Aq '
+.\"
+.\" If the F register is >0, we'll generate index entries on stderr for
+.\" titles (.TH), headers (.SH), subsections (.SS), items (.Ip), and index
+.\" entries marked with X<> in POD. Of course, you'll have to process the
+.\" output yourself in some meaningful fashion.
+.\"
+.\" Avoid warning from groff about undefined register 'F'.
+.de IX
+..
+.nr rF 0
+.if \n(.g .if rF .nr rF 1
+.if (\n(rF:(\n(.g==0)) \{\
+. if \nF \{\
+. de IX
+. tm Index:\\$1\t\\n%\t"\\$2"
+..
+. if !\nF==2 \{\
+. nr % 0
+. nr F 2
+. \}
+. \}
+.\}
+.rr rF
+.\"
+.\" Accent mark definitions (@(#)ms.acc 1.5 88/02/08 SMI; from UCB 4.2).
+.\" Fear. Run. Save yourself. No user-serviceable parts.
+. \" fudge factors for nroff and troff
+.if n \{\
+. ds #H 0
+. ds #V .8m
+. ds #F .3m
+. ds #[ \f1
+. ds #] \fP
+.\}
+.if t \{\
+. ds #H ((1u-(\\\\n(.fu%2u))*.13m)
+. ds #V .6m
+. ds #F 0
+. ds #[ \&
+. ds #] \&
+.\}
+. \" simple accents for nroff and troff
+.if n \{\
+. ds ' \&
+. ds ` \&
+. ds ^ \&
+. ds , \&
+. ds ~ ~
+. ds /
+.\}
+.if t \{\
+. ds ' \\k:\h'-(\\n(.wu*8/10-\*(#H)'\'\h"|\\n:u"
+. ds ` \\k:\h'-(\\n(.wu*8/10-\*(#H)'\`\h'|\\n:u'
+. ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'^\h'|\\n:u'
+. ds , \\k:\h'-(\\n(.wu*8/10)',\h'|\\n:u'
+. ds ~ \\k:\h'-(\\n(.wu-\*(#H-.1m)'~\h'|\\n:u'
+. ds / \\k:\h'-(\\n(.wu*8/10-\*(#H)'\z\(sl\h'|\\n:u'
+.\}
+. \" troff and (daisy-wheel) nroff accents
+.ds : \\k:\h'-(\\n(.wu*8/10-\*(#H+.1m+\*(#F)'\v'-\*(#V'\z.\h'.2m+\*(#F'.\h'|\\n:u'\v'\*(#V'
+.ds 8 \h'\*(#H'\(*b\h'-\*(#H'
+.ds o \\k:\h'-(\\n(.wu+\w'\(de'u-\*(#H)/2u'\v'-.3n'\*(#[\z\(de\v'.3n'\h'|\\n:u'\*(#]
+.ds d- \h'\*(#H'\(pd\h'-\w'~'u'\v'-.25m'\f2\(hy\fP\v'.25m'\h'-\*(#H'
+.ds D- D\\k:\h'-\w'D'u'\v'-.11m'\z\(hy\v'.11m'\h'|\\n:u'
+.ds th \*(#[\v'.3m'\s+1I\s-1\v'-.3m'\h'-(\w'I'u*2/3)'\s-1o\s+1\*(#]
+.ds Th \*(#[\s+2I\s-2\h'-\w'I'u*3/5'\v'-.3m'o\v'.3m'\*(#]
+.ds ae a\h'-(\w'a'u*4/10)'e
+.ds Ae A\h'-(\w'A'u*4/10)'E
+. \" corrections for vroff
+.if v .ds ~ \\k:\h'-(\\n(.wu*9/10-\*(#H)'\s-2\u~\d\s+2\h'|\\n:u'
+.if v .ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'\v'-.4m'^\v'.4m'\h'|\\n:u'
+. \" for low resolution devices (crt and lpr)
+.if \n(.H>23 .if \n(.V>19 \
+\{\
+. ds : e
+. ds 8 ss
+. ds o a
+. ds d- d\h'-1'\(ga
+. ds D- D\h'-1'\(hy
+. ds th \o'bp'
+. ds Th \o'LP'
+. ds ae ae
+. ds Ae AE
+.\}
+.rm #[ #] #H #V #F C
+.\" ========================================================================
+.\"
+.IX Title "EVP_PKEY_SIZE 3"
+.TH EVP_PKEY_SIZE 3 "2020-03-17" "1.1.1e" "OpenSSL"
+.\" For nroff, turn off justification. Always turn off hyphenation; it makes
+.\" way too many mistakes in technical documents.
+.if n .ad l
+.nh
+.SH "NAME"
+EVP_PKEY_size, EVP_PKEY_bits, EVP_PKEY_security_bits \&\- EVP_PKEY information functions
+.SH "SYNOPSIS"
+.IX Header "SYNOPSIS"
+.Vb 1
+\& #include <openssl/evp.h>
+\&
+\& int EVP_PKEY_size(const EVP_PKEY *pkey);
+\& int EVP_PKEY_bits(const EVP_PKEY *pkey);
+\& int EVP_PKEY_security_bits(const EVP_PKEY *pkey);
+.Ve
+.SH "DESCRIPTION"
+.IX Header "DESCRIPTION"
+\&\fBEVP_PKEY_size()\fR returns the maximum suitable size for the output
+buffers for almost all operations that can be done with \fIpkey\fR.
+The primary documented use is with \fBEVP_SignFinal\fR\|(3) and
+\&\fBEVP_SealInit\fR\|(3), but it isn't limited there. The returned size is
+also large enough for the output buffer of \fBEVP_PKEY_sign\fR\|(3),
+\&\fBEVP_PKEY_encrypt\fR\|(3), \fBEVP_PKEY_decrypt\fR\|(3), \fBEVP_PKEY_derive\fR\|(3).
+.PP
+It must be stressed that, unless the documentation for the operation
+that's being performed says otherwise, the size returned by
+\&\fBEVP_PKEY_size()\fR is only preliminary and not exact, so the final
+contents of the target buffer may be smaller. It is therefore crucial
+to take note of the size given back by the function that performs the
+operation, such as \fBEVP_PKEY_sign\fR\|(3) (the \fIsiglen\fR argument will
+receive that length), to avoid bugs.
+.PP
+\&\fBEVP_PKEY_bits()\fR returns the cryptographic length of the cryptosystem
+to which the key in \fIpkey\fR belongs, in bits. Note that the definition
+of cryptographic length is specific to the key cryptosystem.
+.PP
+\&\fBEVP_PKEY_security_bits()\fR returns the number of security bits of the given
+\&\fIpkey\fR, bits of security is defined in \s-1NIST SP800\-57.\s0
+.SH "RETURN VALUES"
+.IX Header "RETURN VALUES"
+\&\fBEVP_PKEY_size()\fR, \fBEVP_PKEY_bits()\fR and \fBEVP_PKEY_security_bits()\fR return a
+positive number, or 0 if this size isn't available.
+.SH "NOTES"
+.IX Header "NOTES"
+Most functions that have an output buffer and are mentioned with
+\&\fBEVP_PKEY_size()\fR have a functionality where you can pass \s-1NULL\s0 for the
+buffer and still pass a pointer to an integer and get the exact size
+that this function call delivers in the context that it's called in.
+This allows those functions to be called twice, once to find out the
+exact buffer size, then allocate the buffer in between, and call that
+function again to actually output the data. For those functions, it
+isn't strictly necessary to call \fBEVP_PKEY_size()\fR to find out the
+buffer size, but may be useful in cases where it's desirable to know
+the upper limit in advance.
+.PP
+It should also be especially noted that \fBEVP_PKEY_size()\fR shouldn't be
+used to get the output size for \fBEVP_DigestSignFinal()\fR, according to
+\&\*(L"\s-1NOTES\*(R"\s0 in \fBEVP_DigestSignFinal\fR\|(3).
+.SH "SEE ALSO"
+.IX Header "SEE ALSO"
+\&\fBEVP_SignFinal\fR\|(3),
+\&\fBEVP_SealInit\fR\|(3),
+\&\fBEVP_PKEY_sign\fR\|(3),
+\&\fBEVP_PKEY_encrypt\fR\|(3),
+\&\fBEVP_PKEY_decrypt\fR\|(3),
+\&\fBEVP_PKEY_derive\fR\|(3)
+.SH "COPYRIGHT"
+.IX Header "COPYRIGHT"
+Copyright 2020 The OpenSSL Project Authors. All Rights Reserved.
+.PP
+Licensed under the Apache License 2.0 (the \*(L"License\*(R"). You may not use
+this file except in compliance with the License. You can obtain a copy
+in the file \s-1LICENSE\s0 in the source distribution or at
+<https://www.openssl.org/source/license.html>.
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_verify.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_verify.3
index c7f15a36b1618..310b196dc9916 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_verify.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_verify.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_VERIFY 3"
-.TH EVP_PKEY_VERIFY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_VERIFY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_PKEY_verify_recover.3 b/secure/lib/libcrypto/man/man3/EVP_PKEY_verify_recover.3
index d75534aa73f5e..df5d9dfc786c0 100644
--- a/secure/lib/libcrypto/man/man3/EVP_PKEY_verify_recover.3
+++ b/secure/lib/libcrypto/man/man3/EVP_PKEY_verify_recover.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_PKEY_VERIFY_RECOVER 3"
-.TH EVP_PKEY_VERIFY_RECOVER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_PKEY_VERIFY_RECOVER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_SealInit.3 b/secure/lib/libcrypto/man/man3/EVP_SealInit.3
index 32c26df471a9b..2671c0ef649e9 100644
--- a/secure/lib/libcrypto/man/man3/EVP_SealInit.3
+++ b/secure/lib/libcrypto/man/man3/EVP_SealInit.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_SEALINIT 3"
-.TH EVP_SEALINIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_SEALINIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_SignInit.3 b/secure/lib/libcrypto/man/man3/EVP_SignInit.3
index cf7eb2b1a672d..4279defd5d677 100644
--- a/secure/lib/libcrypto/man/man3/EVP_SignInit.3
+++ b/secure/lib/libcrypto/man/man3/EVP_SignInit.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,13 +133,13 @@
.\" ========================================================================
.\"
.IX Title "EVP_SIGNINIT 3"
-.TH EVP_SIGNINIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_SIGNINIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-EVP_PKEY_size, EVP_SignInit, EVP_SignInit_ex, EVP_SignUpdate, EVP_SignFinal, EVP_PKEY_security_bits \- EVP signing functions
+EVP_SignInit, EVP_SignInit_ex, EVP_SignUpdate, EVP_SignFinal \&\- EVP signing functions
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
@@ -150,48 +150,35 @@ EVP_PKEY_size, EVP_SignInit, EVP_SignInit_ex, EVP_SignUpdate, EVP_SignFinal, EVP
\& int EVP_SignFinal(EVP_MD_CTX *ctx, unsigned char *sig, unsigned int *s, EVP_PKEY *pkey);
\&
\& void EVP_SignInit(EVP_MD_CTX *ctx, const EVP_MD *type);
-\&
-\& int EVP_PKEY_size(const EVP_PKEY *pkey);
-\& int EVP_PKEY_security_bits(const EVP_PKEY *pkey);
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
The \s-1EVP\s0 signature routines are a high level interface to digital
signatures.
.PP
-\&\fBEVP_SignInit_ex()\fR sets up signing context \fBctx\fR to use digest
-\&\fBtype\fR from \s-1ENGINE\s0 \fBimpl\fR. \fBctx\fR must be created with
+\&\fBEVP_SignInit_ex()\fR sets up signing context \fIctx\fR to use digest
+\&\fItype\fR from \fB\s-1ENGINE\s0\fR \fIimpl\fR. \fIctx\fR must be created with
\&\fBEVP_MD_CTX_new()\fR before calling this function.
.PP
-\&\fBEVP_SignUpdate()\fR hashes \fBcnt\fR bytes of data at \fBd\fR into the
-signature context \fBctx\fR. This function can be called several times on the
-same \fBctx\fR to include additional data.
+\&\fBEVP_SignUpdate()\fR hashes \fIcnt\fR bytes of data at \fId\fR into the
+signature context \fIctx\fR. This function can be called several times on the
+same \fIctx\fR to include additional data.
.PP
-\&\fBEVP_SignFinal()\fR signs the data in \fBctx\fR using the private key \fBpkey\fR and
-places the signature in \fBsig\fR. \fBsig\fR must be at least EVP_PKEY_size(pkey)
-bytes in size. \fBs\fR is an \s-1OUT\s0 parameter, and not used as an \s-1IN\s0 parameter.
+\&\fBEVP_SignFinal()\fR signs the data in \fIctx\fR using the private key \fIpkey\fR and
+places the signature in \fIsig\fR. \fIsig\fR must be at least \f(CW\*(C`EVP_PKEY_size(pkey)\*(C'\fR
+bytes in size. \fIs\fR is an \s-1OUT\s0 parameter, and not used as an \s-1IN\s0 parameter.
The number of bytes of data written (i.e. the length of the signature)
-will be written to the integer at \fBs\fR, at most EVP_PKEY_size(pkey) bytes
+will be written to the integer at \fIs\fR, at most \f(CW\*(C`EVP_PKEY_size(pkey)\*(C'\fR bytes
will be written.
.PP
-\&\fBEVP_SignInit()\fR initializes a signing context \fBctx\fR to use the default
-implementation of digest \fBtype\fR.
-.PP
-\&\fBEVP_PKEY_size()\fR returns the maximum size of a signature in bytes. The actual
-signature returned by \fBEVP_SignFinal()\fR may be smaller.
-.PP
-\&\fBEVP_PKEY_security_bits()\fR returns the number of security bits of the given \fBpkey\fR,
-bits of security is defined in \s-1NIST SP800\-57.\s0
+\&\fBEVP_SignInit()\fR initializes a signing context \fIctx\fR to use the default
+implementation of digest \fItype\fR.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
\&\fBEVP_SignInit_ex()\fR, \fBEVP_SignUpdate()\fR and \fBEVP_SignFinal()\fR return 1
for success and 0 for failure.
.PP
-\&\fBEVP_PKEY_size()\fR returns the maximum size of a signature in bytes.
-.PP
The error codes can be obtained by \fBERR_get_error\fR\|(3).
-.PP
-\&\fBEVP_PKEY_security_bits()\fR returns the number of security bits.
.SH "NOTES"
.IX Header "NOTES"
The \fB\s-1EVP\s0\fR interface to digital signatures should almost always be used in
@@ -225,6 +212,7 @@ It is not possible to change the signing parameters using these function.
The previous two bugs are fixed in the newer EVP_SignDigest*() function.
.SH "SEE ALSO"
.IX Header "SEE ALSO"
+\&\fBEVP_PKEY_size\fR\|(3), \fBEVP_PKEY_bits\fR\|(3), \fBEVP_PKEY_security_bits\fR\|(3),
\&\fBEVP_VerifyInit\fR\|(3),
\&\fBEVP_DigestInit\fR\|(3),
\&\fBevp\fR\|(7), \s-1\fBHMAC\s0\fR\|(3), \s-1\fBMD2\s0\fR\|(3),
@@ -232,7 +220,7 @@ The previous two bugs are fixed in the newer EVP_SignDigest*() function.
\&\s-1\fBSHA1\s0\fR\|(3), \fBdgst\fR\|(1)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2000\-2019 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2000\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/EVP_VerifyInit.3 b/secure/lib/libcrypto/man/man3/EVP_VerifyInit.3
index 6ce5eafb4085a..9d074bab761bb 100644
--- a/secure/lib/libcrypto/man/man3/EVP_VerifyInit.3
+++ b/secure/lib/libcrypto/man/man3/EVP_VerifyInit.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_VERIFYINIT 3"
-.TH EVP_VERIFYINIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_VERIFYINIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_aes.3 b/secure/lib/libcrypto/man/man3/EVP_aes.3
index c79fbc05d30ac..27cfb6ccc444c 100644
--- a/secure/lib/libcrypto/man/man3/EVP_aes.3
+++ b/secure/lib/libcrypto/man/man3/EVP_aes.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_AES 3"
-.TH EVP_AES 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_AES 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_aria.3 b/secure/lib/libcrypto/man/man3/EVP_aria.3
index 65fd2843fc27a..57a971e79cfaa 100644
--- a/secure/lib/libcrypto/man/man3/EVP_aria.3
+++ b/secure/lib/libcrypto/man/man3/EVP_aria.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_ARIA 3"
-.TH EVP_ARIA 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_ARIA 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_bf_cbc.3 b/secure/lib/libcrypto/man/man3/EVP_bf_cbc.3
index e04fc08cddb9d..08ca0432c477c 100644
--- a/secure/lib/libcrypto/man/man3/EVP_bf_cbc.3
+++ b/secure/lib/libcrypto/man/man3/EVP_bf_cbc.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_BF_CBC 3"
-.TH EVP_BF_CBC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_BF_CBC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_blake2b512.3 b/secure/lib/libcrypto/man/man3/EVP_blake2b512.3
index 257b6707290ce..960ba4f496eff 100644
--- a/secure/lib/libcrypto/man/man3/EVP_blake2b512.3
+++ b/secure/lib/libcrypto/man/man3/EVP_blake2b512.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_BLAKE2B512 3"
-.TH EVP_BLAKE2B512 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_BLAKE2B512 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_camellia.3 b/secure/lib/libcrypto/man/man3/EVP_camellia.3
index f492975343af0..9abd48fb28574 100644
--- a/secure/lib/libcrypto/man/man3/EVP_camellia.3
+++ b/secure/lib/libcrypto/man/man3/EVP_camellia.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_CAMELLIA 3"
-.TH EVP_CAMELLIA 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_CAMELLIA 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_cast5_cbc.3 b/secure/lib/libcrypto/man/man3/EVP_cast5_cbc.3
index 907808dbb23aa..28dbefc6ba14f 100644
--- a/secure/lib/libcrypto/man/man3/EVP_cast5_cbc.3
+++ b/secure/lib/libcrypto/man/man3/EVP_cast5_cbc.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_CAST5_CBC 3"
-.TH EVP_CAST5_CBC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_CAST5_CBC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_chacha20.3 b/secure/lib/libcrypto/man/man3/EVP_chacha20.3
index 0a3f1c52d8e51..e57b40c1a4931 100644
--- a/secure/lib/libcrypto/man/man3/EVP_chacha20.3
+++ b/secure/lib/libcrypto/man/man3/EVP_chacha20.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_CHACHA20 3"
-.TH EVP_CHACHA20 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_CHACHA20 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_des.3 b/secure/lib/libcrypto/man/man3/EVP_des.3
index 2e809fa8ae20c..5da73882ac29b 100644
--- a/secure/lib/libcrypto/man/man3/EVP_des.3
+++ b/secure/lib/libcrypto/man/man3/EVP_des.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_DES 3"
-.TH EVP_DES 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_DES 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_desx_cbc.3 b/secure/lib/libcrypto/man/man3/EVP_desx_cbc.3
index 4f50dd4a9cccf..75ae0acc3b15a 100644
--- a/secure/lib/libcrypto/man/man3/EVP_desx_cbc.3
+++ b/secure/lib/libcrypto/man/man3/EVP_desx_cbc.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_DESX_CBC 3"
-.TH EVP_DESX_CBC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_DESX_CBC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_idea_cbc.3 b/secure/lib/libcrypto/man/man3/EVP_idea_cbc.3
index 5fca826ccf616..0490ba85cd63e 100644
--- a/secure/lib/libcrypto/man/man3/EVP_idea_cbc.3
+++ b/secure/lib/libcrypto/man/man3/EVP_idea_cbc.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_IDEA_CBC 3"
-.TH EVP_IDEA_CBC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_IDEA_CBC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_md2.3 b/secure/lib/libcrypto/man/man3/EVP_md2.3
index e01f4d9e9c4e4..2c6ffc254955e 100644
--- a/secure/lib/libcrypto/man/man3/EVP_md2.3
+++ b/secure/lib/libcrypto/man/man3/EVP_md2.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_MD2 3"
-.TH EVP_MD2 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_MD2 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_md4.3 b/secure/lib/libcrypto/man/man3/EVP_md4.3
index cfcf27ce4acb0..f41fa18cbdddb 100644
--- a/secure/lib/libcrypto/man/man3/EVP_md4.3
+++ b/secure/lib/libcrypto/man/man3/EVP_md4.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_MD4 3"
-.TH EVP_MD4 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_MD4 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_md5.3 b/secure/lib/libcrypto/man/man3/EVP_md5.3
index f512a8de247cd..ecb3734bc3e44 100644
--- a/secure/lib/libcrypto/man/man3/EVP_md5.3
+++ b/secure/lib/libcrypto/man/man3/EVP_md5.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_MD5 3"
-.TH EVP_MD5 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_MD5 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_mdc2.3 b/secure/lib/libcrypto/man/man3/EVP_mdc2.3
index 2f76da399c2dc..e24d48da242ff 100644
--- a/secure/lib/libcrypto/man/man3/EVP_mdc2.3
+++ b/secure/lib/libcrypto/man/man3/EVP_mdc2.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_MDC2 3"
-.TH EVP_MDC2 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_MDC2 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_rc2_cbc.3 b/secure/lib/libcrypto/man/man3/EVP_rc2_cbc.3
index 7129201573a3e..454136b95c0d2 100644
--- a/secure/lib/libcrypto/man/man3/EVP_rc2_cbc.3
+++ b/secure/lib/libcrypto/man/man3/EVP_rc2_cbc.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_RC2_CBC 3"
-.TH EVP_RC2_CBC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_RC2_CBC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_rc4.3 b/secure/lib/libcrypto/man/man3/EVP_rc4.3
index ddf2d51534168..9f94018915eb8 100644
--- a/secure/lib/libcrypto/man/man3/EVP_rc4.3
+++ b/secure/lib/libcrypto/man/man3/EVP_rc4.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_RC4 3"
-.TH EVP_RC4 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_RC4 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_rc5_32_12_16_cbc.3 b/secure/lib/libcrypto/man/man3/EVP_rc5_32_12_16_cbc.3
index a6d21839c41ca..8070bacbee01c 100644
--- a/secure/lib/libcrypto/man/man3/EVP_rc5_32_12_16_cbc.3
+++ b/secure/lib/libcrypto/man/man3/EVP_rc5_32_12_16_cbc.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_RC5_32_12_16_CBC 3"
-.TH EVP_RC5_32_12_16_CBC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_RC5_32_12_16_CBC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_ripemd160.3 b/secure/lib/libcrypto/man/man3/EVP_ripemd160.3
index 8380f78ef3a5e..d9aee0a1d6be0 100644
--- a/secure/lib/libcrypto/man/man3/EVP_ripemd160.3
+++ b/secure/lib/libcrypto/man/man3/EVP_ripemd160.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_RIPEMD160 3"
-.TH EVP_RIPEMD160 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_RIPEMD160 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_seed_cbc.3 b/secure/lib/libcrypto/man/man3/EVP_seed_cbc.3
index 6dd6417db97fc..ec38fb19afaa3 100644
--- a/secure/lib/libcrypto/man/man3/EVP_seed_cbc.3
+++ b/secure/lib/libcrypto/man/man3/EVP_seed_cbc.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_SEED_CBC 3"
-.TH EVP_SEED_CBC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_SEED_CBC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_sha1.3 b/secure/lib/libcrypto/man/man3/EVP_sha1.3
index fd688d5f4c04d..4342f3853deb2 100644
--- a/secure/lib/libcrypto/man/man3/EVP_sha1.3
+++ b/secure/lib/libcrypto/man/man3/EVP_sha1.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_SHA1 3"
-.TH EVP_SHA1 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_SHA1 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_sha224.3 b/secure/lib/libcrypto/man/man3/EVP_sha224.3
index 3863fcdba82c4..fd7eff30bd76f 100644
--- a/secure/lib/libcrypto/man/man3/EVP_sha224.3
+++ b/secure/lib/libcrypto/man/man3/EVP_sha224.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_SHA224 3"
-.TH EVP_SHA224 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_SHA224 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_sha3_224.3 b/secure/lib/libcrypto/man/man3/EVP_sha3_224.3
index 80912d1f03f49..9cdfc49965471 100644
--- a/secure/lib/libcrypto/man/man3/EVP_sha3_224.3
+++ b/secure/lib/libcrypto/man/man3/EVP_sha3_224.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_SHA3_224 3"
-.TH EVP_SHA3_224 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_SHA3_224 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_sm3.3 b/secure/lib/libcrypto/man/man3/EVP_sm3.3
index b43489ffe60ec..0fd3267b031a8 100644
--- a/secure/lib/libcrypto/man/man3/EVP_sm3.3
+++ b/secure/lib/libcrypto/man/man3/EVP_sm3.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_SM3 3"
-.TH EVP_SM3 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_SM3 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_sm4_cbc.3 b/secure/lib/libcrypto/man/man3/EVP_sm4_cbc.3
index f1bb99f33e4a1..3fee57b8d8c2e 100644
--- a/secure/lib/libcrypto/man/man3/EVP_sm4_cbc.3
+++ b/secure/lib/libcrypto/man/man3/EVP_sm4_cbc.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_SM4_CBC 3"
-.TH EVP_SM4_CBC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_SM4_CBC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/EVP_whirlpool.3 b/secure/lib/libcrypto/man/man3/EVP_whirlpool.3
index 3b8c83942c320..7ab3bd9ecea5a 100644
--- a/secure/lib/libcrypto/man/man3/EVP_whirlpool.3
+++ b/secure/lib/libcrypto/man/man3/EVP_whirlpool.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP_WHIRLPOOL 3"
-.TH EVP_WHIRLPOOL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP_WHIRLPOOL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/HMAC.3 b/secure/lib/libcrypto/man/man3/HMAC.3
index 4ff9e1ddcc849..1f7fafe9f0332 100644
--- a/secure/lib/libcrypto/man/man3/HMAC.3
+++ b/secure/lib/libcrypto/man/man3/HMAC.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "HMAC 3"
-.TH HMAC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH HMAC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/MD5.3 b/secure/lib/libcrypto/man/man3/MD5.3
index 3d127a9e504ff..29a42cbfd8a9e 100644
--- a/secure/lib/libcrypto/man/man3/MD5.3
+++ b/secure/lib/libcrypto/man/man3/MD5.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "MD5 3"
-.TH MD5 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH MD5 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/MDC2_Init.3 b/secure/lib/libcrypto/man/man3/MDC2_Init.3
index eec665c127048..d9b648d4c6d0a 100644
--- a/secure/lib/libcrypto/man/man3/MDC2_Init.3
+++ b/secure/lib/libcrypto/man/man3/MDC2_Init.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "MDC2_INIT 3"
-.TH MDC2_INIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH MDC2_INIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/Makefile b/secure/lib/libcrypto/man/man3/Makefile
index da6c0c8a8e881..e315bc7be8173 100644
--- a/secure/lib/libcrypto/man/man3/Makefile
+++ b/secure/lib/libcrypto/man/man3/Makefile
@@ -164,6 +164,7 @@ MAN+= EVP_PKEY_new.3
MAN+= EVP_PKEY_print_private.3
MAN+= EVP_PKEY_set1_RSA.3
MAN+= EVP_PKEY_sign.3
+MAN+= EVP_PKEY_size.3
MAN+= EVP_PKEY_verify.3
MAN+= EVP_PKEY_verify_recover.3
MAN+= EVP_SealInit.3
@@ -417,6 +418,7 @@ MAN+= X509V3_get_d2i.3
MAN+= X509_ALGOR_dup.3
MAN+= X509_CRL_get0_by_serial.3
MAN+= X509_EXTENSION_set_object.3
+MAN+= X509_LOOKUP.3
MAN+= X509_LOOKUP_hash_dir.3
MAN+= X509_LOOKUP_meth_new.3
MAN+= X509_NAME_ENTRY_get_object.3
@@ -1375,6 +1377,7 @@ MLINKS+= EVP_CIPHER_meth_new.3 EVP_CIPHER_meth_set_impl_ctx_size.3
MLINKS+= EVP_CIPHER_meth_new.3 EVP_CIPHER_meth_set_init.3
MLINKS+= EVP_CIPHER_meth_new.3 EVP_CIPHER_meth_set_iv_length.3
MLINKS+= EVP_CIPHER_meth_new.3 EVP_CIPHER_meth_set_set_asn1_params.3
+MLINKS+= EVP_DigestInit.3 EVP_Digest.3
MLINKS+= EVP_DigestInit.3 EVP_DigestFinal.3
MLINKS+= EVP_DigestInit.3 EVP_DigestFinalXOF.3
MLINKS+= EVP_DigestInit.3 EVP_DigestFinal_ex.3
@@ -1389,13 +1392,17 @@ MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_free.3
MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_md.3
MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_md_data.3
MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_new.3
+MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_pkey_ctx.3
MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_reset.3
MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_set_flags.3
MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_set_pkey_ctx.3
+MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_set_update_fn.3
MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_size.3
MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_test_flags.3
MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_type.3
+MLINKS+= EVP_DigestInit.3 EVP_MD_CTX_update_fn.3
MLINKS+= EVP_DigestInit.3 EVP_MD_block_size.3
+MLINKS+= EVP_DigestInit.3 EVP_MD_flags.3
MLINKS+= EVP_DigestInit.3 EVP_MD_pkey_type.3
MLINKS+= EVP_DigestInit.3 EVP_MD_size.3
MLINKS+= EVP_DigestInit.3 EVP_MD_type.3
@@ -1546,6 +1553,8 @@ MLINKS+= EVP_PKEY_CTX_ctrl.3 EVP_PKEY_CTX_set_dh_paramgen_type.3
MLINKS+= EVP_PKEY_CTX_ctrl.3 EVP_PKEY_CTX_set_dh_rfc5114.3
MLINKS+= EVP_PKEY_CTX_ctrl.3 EVP_PKEY_CTX_set_dhx_rfc5114.3
MLINKS+= EVP_PKEY_CTX_ctrl.3 EVP_PKEY_CTX_set_dsa_paramgen_bits.3
+MLINKS+= EVP_PKEY_CTX_ctrl.3 EVP_PKEY_CTX_set_dsa_paramgen_md.3
+MLINKS+= EVP_PKEY_CTX_ctrl.3 EVP_PKEY_CTX_set_dsa_paramgen_q_bits.3
MLINKS+= EVP_PKEY_CTX_ctrl.3 EVP_PKEY_CTX_set_ec_param_enc.3
MLINKS+= EVP_PKEY_CTX_ctrl.3 EVP_PKEY_CTX_set_ec_paramgen_curve_nid.3
MLINKS+= EVP_PKEY_CTX_ctrl.3 EVP_PKEY_CTX_set_ecdh_cofactor_mode.3
@@ -1613,6 +1622,8 @@ MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_get_ctrl.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_get_decrypt.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_get_derive.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_get_digest_custom.3
+MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_get_digestsign.3
+MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_get_digestverify.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_get_encrypt.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_get_init.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_get_keygen.3
@@ -1632,6 +1643,8 @@ MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_set_ctrl.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_set_decrypt.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_set_derive.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_set_digest_custom.3
+MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_set_digestsign.3
+MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_set_digestverify.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_set_encrypt.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_set_init.3
MLINKS+= EVP_PKEY_meth_new.3 EVP_PKEY_meth_set_keygen.3
@@ -1680,12 +1693,12 @@ MLINKS+= EVP_PKEY_set1_RSA.3 EVP_PKEY_set1_engine.3
MLINKS+= EVP_PKEY_set1_RSA.3 EVP_PKEY_set_alias_type.3
MLINKS+= EVP_PKEY_set1_RSA.3 EVP_PKEY_type.3
MLINKS+= EVP_PKEY_sign.3 EVP_PKEY_sign_init.3
+MLINKS+= EVP_PKEY_size.3 EVP_PKEY_bits.3
+MLINKS+= EVP_PKEY_size.3 EVP_PKEY_security_bits.3
MLINKS+= EVP_PKEY_verify.3 EVP_PKEY_verify_init.3
MLINKS+= EVP_PKEY_verify_recover.3 EVP_PKEY_verify_recover_init.3
MLINKS+= EVP_SealInit.3 EVP_SealFinal.3
MLINKS+= EVP_SealInit.3 EVP_SealUpdate.3
-MLINKS+= EVP_SignInit.3 EVP_PKEY_security_bits.3
-MLINKS+= EVP_SignInit.3 EVP_PKEY_size.3
MLINKS+= EVP_SignInit.3 EVP_SignFinal.3
MLINKS+= EVP_SignInit.3 EVP_SignInit_ex.3
MLINKS+= EVP_SignInit.3 EVP_SignUpdate.3
@@ -2015,6 +2028,7 @@ MLINKS+= OPENSSL_malloc.3 OPENSSL_strlcat.3
MLINKS+= OPENSSL_malloc.3 OPENSSL_strlcpy.3
MLINKS+= OPENSSL_malloc.3 OPENSSL_strndup.3
MLINKS+= OPENSSL_malloc.3 OPENSSL_zalloc.3
+MLINKS+= OPENSSL_secure_malloc.3 CRYPTO_secure_allocated.3
MLINKS+= OPENSSL_secure_malloc.3 CRYPTO_secure_clear_free.3
MLINKS+= OPENSSL_secure_malloc.3 CRYPTO_secure_free.3
MLINKS+= OPENSSL_secure_malloc.3 CRYPTO_secure_malloc.3
@@ -2153,6 +2167,7 @@ MLINKS+= PEM_read_bio_PrivateKey.3 PEM_read_bio_DSA_PUBKEY.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_read_bio_DSAparams.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_read_bio_PKCS7.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_read_bio_PUBKEY.3
+MLINKS+= PEM_read_bio_PrivateKey.3 PEM_read_bio_Parameters.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_read_bio_RSAPrivateKey.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_read_bio_RSAPublicKey.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_read_bio_RSA_PUBKEY.3
@@ -2185,6 +2200,7 @@ MLINKS+= PEM_read_bio_PrivateKey.3 PEM_write_bio_PKCS7.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_write_bio_PKCS8PrivateKey.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_write_bio_PKCS8PrivateKey_nid.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_write_bio_PUBKEY.3
+MLINKS+= PEM_read_bio_PrivateKey.3 PEM_write_bio_Parameters.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_write_bio_PrivateKey.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_write_bio_PrivateKey_traditional.3
MLINKS+= PEM_read_bio_PrivateKey.3 PEM_write_bio_RSAPrivateKey.3
@@ -2254,6 +2270,7 @@ MLINKS+= RSA_get0_key.3 RSA_get0_multi_prime_crt_params.3
MLINKS+= RSA_get0_key.3 RSA_get0_multi_prime_factors.3
MLINKS+= RSA_get0_key.3 RSA_get0_n.3
MLINKS+= RSA_get0_key.3 RSA_get0_p.3
+MLINKS+= RSA_get0_key.3 RSA_get0_pss_params.3
MLINKS+= RSA_get0_key.3 RSA_get0_q.3
MLINKS+= RSA_get0_key.3 RSA_get_multi_prime_extra_count.3
MLINKS+= RSA_get0_key.3 RSA_get_version.3
@@ -2643,6 +2660,8 @@ MLINKS+= SSL_CTX_set_split_send_fragment.3 SSL_set_split_send_fragment.3
MLINKS+= SSL_CTX_set_split_send_fragment.3 SSL_set_tlsext_max_fragment_length.3
MLINKS+= SSL_CTX_set_ssl_version.3 SSL_get_ssl_method.3
MLINKS+= SSL_CTX_set_ssl_version.3 SSL_set_ssl_method.3
+MLINKS+= SSL_CTX_set_stateless_cookie_generate_cb.3 SSL_CTX_set_cookie_generate_cb.3
+MLINKS+= SSL_CTX_set_stateless_cookie_generate_cb.3 SSL_CTX_set_cookie_verify_cb.3
MLINKS+= SSL_CTX_set_stateless_cookie_generate_cb.3 SSL_CTX_set_stateless_cookie_verify_cb.3
MLINKS+= SSL_CTX_set_timeout.3 SSL_CTX_get_timeout.3
MLINKS+= SSL_CTX_set_tlsext_servername_callback.3 SSL_CTX_set_tlsext_servername_arg.3
@@ -2779,6 +2798,8 @@ MLINKS+= SSL_key_update.3 SSL_renegotiate.3
MLINKS+= SSL_key_update.3 SSL_renegotiate_abbreviated.3
MLINKS+= SSL_key_update.3 SSL_renegotiate_pending.3
MLINKS+= SSL_library_init.3 OpenSSL_add_ssl_algorithms.3
+MLINKS+= SSL_load_client_CA_file.3 SSL_add_dir_cert_subjects_to_stack.3
+MLINKS+= SSL_load_client_CA_file.3 SSL_add_file_cert_subjects_to_stack.3
MLINKS+= SSL_new.3 SSL_dup.3
MLINKS+= SSL_new.3 SSL_up_ref.3
MLINKS+= SSL_pending.3 SSL_has_pending.3
@@ -2911,17 +2932,31 @@ MLINKS+= X509_EXTENSION_set_object.3 X509_EXTENSION_get_data.3
MLINKS+= X509_EXTENSION_set_object.3 X509_EXTENSION_get_object.3
MLINKS+= X509_EXTENSION_set_object.3 X509_EXTENSION_set_critical.3
MLINKS+= X509_EXTENSION_set_object.3 X509_EXTENSION_set_data.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_TYPE.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_add_dir.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_by_alias.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_by_fingerprint.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_by_issuer_serial.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_by_subject.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_ctrl.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_free.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_get_method_data.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_get_store.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_init.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_load_file.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_new.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_set_method_data.3
+MLINKS+= X509_LOOKUP.3 X509_LOOKUP_shutdown.3
MLINKS+= X509_LOOKUP_hash_dir.3 X509_LOOKUP_file.3
MLINKS+= X509_LOOKUP_hash_dir.3 X509_load_cert_crl_file.3
MLINKS+= X509_LOOKUP_hash_dir.3 X509_load_cert_file.3
MLINKS+= X509_LOOKUP_hash_dir.3 X509_load_crl_file.3
+MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_METHOD.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_ctrl_fn.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_get_by_alias_fn.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_get_by_fingerprint_fn.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_get_by_issuer_serial_fn.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_get_by_subject_fn.3
-MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_get_method_data.3
-MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_get_store.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_meth_free.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_meth_get_ctrl.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_meth_get_free.3
@@ -2941,7 +2976,6 @@ MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_meth_set_get_by_subject.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_meth_set_init.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_meth_set_new_item.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_meth_set_shutdown.3
-MLINKS+= X509_LOOKUP_meth_new.3 X509_LOOKUP_set_method_data.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_OBJECT_set1_X509.3
MLINKS+= X509_LOOKUP_meth_new.3 X509_OBJECT_set1_X509_CRL.3
MLINKS+= X509_NAME_ENTRY_get_object.3 X509_NAME_ENTRY_create_by_NID.3
@@ -3011,7 +3045,9 @@ MLINKS+= X509_STORE_CTX_set_verify_cb.3 X509_STORE_CTX_get_lookup_certs.3
MLINKS+= X509_STORE_CTX_set_verify_cb.3 X509_STORE_CTX_get_lookup_crls.3
MLINKS+= X509_STORE_CTX_set_verify_cb.3 X509_STORE_CTX_get_verify_cb.3
MLINKS+= X509_STORE_CTX_set_verify_cb.3 X509_STORE_CTX_verify_cb.3
+MLINKS+= X509_STORE_add_cert.3 X509_STORE.3
MLINKS+= X509_STORE_add_cert.3 X509_STORE_add_crl.3
+MLINKS+= X509_STORE_add_cert.3 X509_STORE_add_lookup.3
MLINKS+= X509_STORE_add_cert.3 X509_STORE_load_locations.3
MLINKS+= X509_STORE_add_cert.3 X509_STORE_set_default_paths.3
MLINKS+= X509_STORE_add_cert.3 X509_STORE_set_depth.3
diff --git a/secure/lib/libcrypto/man/man3/OBJ_nid2obj.3 b/secure/lib/libcrypto/man/man3/OBJ_nid2obj.3
index 626bf1acd2613..1d2d01adbe382 100644
--- a/secure/lib/libcrypto/man/man3/OBJ_nid2obj.3
+++ b/secure/lib/libcrypto/man/man3/OBJ_nid2obj.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OBJ_NID2OBJ 3"
-.TH OBJ_NID2OBJ 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OBJ_NID2OBJ 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OCSP_REQUEST_new.3 b/secure/lib/libcrypto/man/man3/OCSP_REQUEST_new.3
index 7411aaa5561b7..f770cd2c6aab5 100644
--- a/secure/lib/libcrypto/man/man3/OCSP_REQUEST_new.3
+++ b/secure/lib/libcrypto/man/man3/OCSP_REQUEST_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OCSP_REQUEST_NEW 3"
-.TH OCSP_REQUEST_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OCSP_REQUEST_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OCSP_cert_to_id.3 b/secure/lib/libcrypto/man/man3/OCSP_cert_to_id.3
index 4e363ece6a158..ada443513c37e 100644
--- a/secure/lib/libcrypto/man/man3/OCSP_cert_to_id.3
+++ b/secure/lib/libcrypto/man/man3/OCSP_cert_to_id.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OCSP_CERT_TO_ID 3"
-.TH OCSP_CERT_TO_ID 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OCSP_CERT_TO_ID 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OCSP_request_add1_nonce.3 b/secure/lib/libcrypto/man/man3/OCSP_request_add1_nonce.3
index a79b659337daf..96ef74b3537f9 100644
--- a/secure/lib/libcrypto/man/man3/OCSP_request_add1_nonce.3
+++ b/secure/lib/libcrypto/man/man3/OCSP_request_add1_nonce.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OCSP_REQUEST_ADD1_NONCE 3"
-.TH OCSP_REQUEST_ADD1_NONCE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OCSP_REQUEST_ADD1_NONCE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -161,7 +161,7 @@ it adds a nonce to \s-1OCSP\s0 basic response \fBresp\fR.
.PP
\&\fBOCSP_check_nonce()\fR compares the nonce value in \fBreq\fR and \fBresp\fR.
.PP
-\&\fBOCSP_copy_nonce()\fR copys any nonce value present in \fBreq\fR to \fBresp\fR.
+\&\fBOCSP_copy_nonce()\fR copies any nonce value present in \fBreq\fR to \fBresp\fR.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
\&\fBOCSP_request_add1_nonce()\fR and \fBOCSP_basic_add1_nonce()\fR return 1 for success
@@ -207,7 +207,7 @@ condition.
\&\fBOCSP_sendreq_new\fR\|(3)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2015\-2016 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2015\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/OCSP_resp_find_status.3 b/secure/lib/libcrypto/man/man3/OCSP_resp_find_status.3
index 302ebaca82643..e89cb7c73d384 100644
--- a/secure/lib/libcrypto/man/man3/OCSP_resp_find_status.3
+++ b/secure/lib/libcrypto/man/man3/OCSP_resp_find_status.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OCSP_RESP_FIND_STATUS 3"
-.TH OCSP_RESP_FIND_STATUS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OCSP_RESP_FIND_STATUS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OCSP_response_status.3 b/secure/lib/libcrypto/man/man3/OCSP_response_status.3
index 5a2283240bcae..1384c5a28e754 100644
--- a/secure/lib/libcrypto/man/man3/OCSP_response_status.3
+++ b/secure/lib/libcrypto/man/man3/OCSP_response_status.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OCSP_RESPONSE_STATUS 3"
-.TH OCSP_RESPONSE_STATUS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OCSP_RESPONSE_STATUS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -192,7 +192,7 @@ with the X509 certificate \fBcert\fR.
.PP
\&\fBOCSP_basic_sign()\fR signs \s-1OCSP\s0 response \fBbrsp\fR using certificate \fBsigner\fR, private key
\&\fBkey\fR, digest \fBdgst\fR and additional certificates \fBcerts\fR. If the \fBflags\fR option
-\&\fB\s-1OCSP_NOCERTS\s0\fR is set then no certificates will be included in the request. If the
+\&\fB\s-1OCSP_NOCERTS\s0\fR is set then no certificates will be included in the response. If the
\&\fBflags\fR option \fB\s-1OCSP_RESPID_KEY\s0\fR is set then the responder is identified by key \s-1ID\s0
rather than by name. \fBOCSP_basic_sign_ctx()\fR also signs \s-1OCSP\s0 response \fBbrsp\fR but
uses the parameters contained in digest context \fBctx\fR.
@@ -236,7 +236,7 @@ functions were added in OpenSSL 1.1.0a.
The \fBOCSP_basic_sign_ctx()\fR function was added in OpenSSL 1.1.1.
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2015\-2018 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2015\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/OCSP_sendreq_new.3 b/secure/lib/libcrypto/man/man3/OCSP_sendreq_new.3
index ae83ab3b6d608..c9d0ba910d0c2 100644
--- a/secure/lib/libcrypto/man/man3/OCSP_sendreq_new.3
+++ b/secure/lib/libcrypto/man/man3/OCSP_sendreq_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OCSP_SENDREQ_NEW 3"
-.TH OCSP_SENDREQ_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OCSP_SENDREQ_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -159,8 +159,7 @@ OCSP_sendreq_new, OCSP_sendreq_nbio, OCSP_REQ_CTX_free, OCSP_set_max_response_le
\&
\& int OCSP_REQ_CTX_set1_req(OCSP_REQ_CTX *rctx, OCSP_REQUEST *req);
\&
-\& OCSP_RESPONSE *OCSP_sendreq_bio(BIO *io, const char *path, OCSP_REQUEST *req,
-\& int maxline);
+\& OCSP_RESPONSE *OCSP_sendreq_bio(BIO *io, const char *path, OCSP_REQUEST *req);
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
@@ -189,8 +188,8 @@ additional headers are set.
function should be called after any calls to \fBOCSP_REQ_CTX_add1_header()\fR.
.PP
\&\fBOCSP_sendreq_bio()\fR performs an \s-1OCSP\s0 request using the responder \fBio\fR, the \s-1URL\s0
-path \fBpath\fR, the \s-1OCSP\s0 request \fBreq\fR and with a response header maximum line
-length of \fBmaxline\fR. If \fBmaxline\fR is zero a default value of 4k is used.
+path \fBpath\fR, and the \s-1OCSP\s0 request \fBreq\fR with a response header maximum line
+length 4k. It waits indefinitely on a response.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
\&\fBOCSP_sendreq_new()\fR returns a valid \fB\s-1OCSP_REQ_CTX\s0\fR structure or \fB\s-1NULL\s0\fR if
@@ -245,7 +244,7 @@ applications is not recommended.
\&\fBOCSP_response_status\fR\|(3)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2015\-2016 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2015\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_Applink.3 b/secure/lib/libcrypto/man/man3/OPENSSL_Applink.3
index 90d634ad4c9f6..d5c9808a81e1d 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_Applink.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_Applink.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_APPLINK 3"
-.TH OPENSSL_APPLINK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_APPLINK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_LH_COMPFUNC.3 b/secure/lib/libcrypto/man/man3/OPENSSL_LH_COMPFUNC.3
index bf096f92889c3..c70be459f141d 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_LH_COMPFUNC.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_LH_COMPFUNC.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_LH_COMPFUNC 3"
-.TH OPENSSL_LH_COMPFUNC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_LH_COMPFUNC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_LH_stats.3 b/secure/lib/libcrypto/man/man3/OPENSSL_LH_stats.3
index db711b2092c26..80b83eb76a107 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_LH_stats.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_LH_stats.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_LH_STATS 3"
-.TH OPENSSL_LH_STATS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_LH_STATS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_VERSION_NUMBER.3 b/secure/lib/libcrypto/man/man3/OPENSSL_VERSION_NUMBER.3
index 0988b613a75eb..6f02ca5d5b188 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_VERSION_NUMBER.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_VERSION_NUMBER.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_VERSION_NUMBER 3"
-.TH OPENSSL_VERSION_NUMBER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_VERSION_NUMBER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_config.3 b/secure/lib/libcrypto/man/man3/OPENSSL_config.3
index ea738358f6534..9e3261690f523 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_config.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_config.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_CONFIG 3"
-.TH OPENSSL_CONFIG 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_CONFIG 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_fork_prepare.3 b/secure/lib/libcrypto/man/man3/OPENSSL_fork_prepare.3
index 599dc0197990b..cfb6e2cc35053 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_fork_prepare.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_fork_prepare.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_FORK_PREPARE 3"
-.TH OPENSSL_FORK_PREPARE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_FORK_PREPARE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_ia32cap.3 b/secure/lib/libcrypto/man/man3/OPENSSL_ia32cap.3
index c2e3b850f0ca1..8db1f05c15ab3 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_ia32cap.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_ia32cap.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_IA32CAP 3"
-.TH OPENSSL_IA32CAP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_IA32CAP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_init_crypto.3 b/secure/lib/libcrypto/man/man3/OPENSSL_init_crypto.3
index 18c08554a47ca..a0543eaceae02 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_init_crypto.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_init_crypto.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_INIT_CRYPTO 3"
-.TH OPENSSL_INIT_CRYPTO 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_INIT_CRYPTO 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_init_ssl.3 b/secure/lib/libcrypto/man/man3/OPENSSL_init_ssl.3
index 349bbd3aae087..9458ae749c102 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_init_ssl.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_init_ssl.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_INIT_SSL 3"
-.TH OPENSSL_INIT_SSL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_INIT_SSL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_instrument_bus.3 b/secure/lib/libcrypto/man/man3/OPENSSL_instrument_bus.3
index 8bd2a1e85a302..032e78f5ce31c 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_instrument_bus.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_instrument_bus.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_INSTRUMENT_BUS 3"
-.TH OPENSSL_INSTRUMENT_BUS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_INSTRUMENT_BUS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_load_builtin_modules.3 b/secure/lib/libcrypto/man/man3/OPENSSL_load_builtin_modules.3
index 6ae621919108d..5e1360efaf032 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_load_builtin_modules.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_load_builtin_modules.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_LOAD_BUILTIN_MODULES 3"
-.TH OPENSSL_LOAD_BUILTIN_MODULES 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_LOAD_BUILTIN_MODULES 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_malloc.3 b/secure/lib/libcrypto/man/man3/OPENSSL_malloc.3
index d4d9cc8b38ffb..7e5abbd2dc392 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_malloc.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_malloc.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_MALLOC 3"
-.TH OPENSSL_MALLOC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_MALLOC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -280,7 +280,7 @@ about what is being done.
For example, identifying the field names when parsing a complicated
data structure.
\&\fBOPENSSL_mem_debug_push()\fR (which calls \fBCRYPTO_mem_debug_push()\fR)
-attachs an identifying string to the allocation stack.
+attaches an identifying string to the allocation stack.
This must be a global or other static string; it is not copied.
\&\fBOPENSSL_mem_debug_pop()\fR removes identifying state from the stack.
.PP
@@ -363,7 +363,7 @@ configuration option\fR \f(CW\*(C`crypto\-mdebug\*(C'\fR \fIenabled. In case, s
only, say, the \f(BImalloc()\fI implementation is outright dangerous.\fR
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2016\-2019 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2016\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/OPENSSL_secure_malloc.3 b/secure/lib/libcrypto/man/man3/OPENSSL_secure_malloc.3
index 032d205fd7858..e9a02f8d013b3 100644
--- a/secure/lib/libcrypto/man/man3/OPENSSL_secure_malloc.3
+++ b/secure/lib/libcrypto/man/man3/OPENSSL_secure_malloc.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,13 +133,13 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_SECURE_MALLOC 3"
-.TH OPENSSL_SECURE_MALLOC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_SECURE_MALLOC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-CRYPTO_secure_malloc_init, CRYPTO_secure_malloc_initialized, CRYPTO_secure_malloc_done, OPENSSL_secure_malloc, CRYPTO_secure_malloc, OPENSSL_secure_zalloc, CRYPTO_secure_zalloc, OPENSSL_secure_free, CRYPTO_secure_free, OPENSSL_secure_clear_free, CRYPTO_secure_clear_free, OPENSSL_secure_actual_size, CRYPTO_secure_used \- secure heap storage
+CRYPTO_secure_malloc_init, CRYPTO_secure_malloc_initialized, CRYPTO_secure_malloc_done, OPENSSL_secure_malloc, CRYPTO_secure_malloc, OPENSSL_secure_zalloc, CRYPTO_secure_zalloc, OPENSSL_secure_free, CRYPTO_secure_free, OPENSSL_secure_clear_free, CRYPTO_secure_clear_free, OPENSSL_secure_actual_size, CRYPTO_secure_allocated, CRYPTO_secure_used \- secure heap storage
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
@@ -165,6 +165,7 @@ CRYPTO_secure_malloc_init, CRYPTO_secure_malloc_initialized, CRYPTO_secure_mallo
\&
\& size_t OPENSSL_secure_actual_size(const void *ptr);
\&
+\& int CRYPTO_secure_allocated(const void *ptr);
\& size_t CRYPTO_secure_used();
.Ve
.SH "DESCRIPTION"
@@ -222,6 +223,8 @@ calling \fBOPENSSL_clear_free()\fR.
pointer; implementations may allocate more space than initially
requested, in order to \*(L"round up\*(R" and reduce secure heap fragmentation.
.PP
+\&\fBCRYPTO_secure_allocated()\fR tells if a pointer is allocated in the secure heap.
+.PP
\&\fBCRYPTO_secure_used()\fR returns the number of bytes allocated in the
secure heap.
.SH "RETURN VALUES"
diff --git a/secure/lib/libcrypto/man/man3/OSSL_STORE_INFO.3 b/secure/lib/libcrypto/man/man3/OSSL_STORE_INFO.3
index ea47f5858d9e3..ee057ce57291e 100644
--- a/secure/lib/libcrypto/man/man3/OSSL_STORE_INFO.3
+++ b/secure/lib/libcrypto/man/man3/OSSL_STORE_INFO.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OSSL_STORE_INFO 3"
-.TH OSSL_STORE_INFO 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OSSL_STORE_INFO 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OSSL_STORE_LOADER.3 b/secure/lib/libcrypto/man/man3/OSSL_STORE_LOADER.3
index 45b15c3a644ab..fb684092e70de 100644
--- a/secure/lib/libcrypto/man/man3/OSSL_STORE_LOADER.3
+++ b/secure/lib/libcrypto/man/man3/OSSL_STORE_LOADER.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OSSL_STORE_LOADER 3"
-.TH OSSL_STORE_LOADER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OSSL_STORE_LOADER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OSSL_STORE_SEARCH.3 b/secure/lib/libcrypto/man/man3/OSSL_STORE_SEARCH.3
index b707a97cd9be8..f8ec42d165779 100644
--- a/secure/lib/libcrypto/man/man3/OSSL_STORE_SEARCH.3
+++ b/secure/lib/libcrypto/man/man3/OSSL_STORE_SEARCH.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OSSL_STORE_SEARCH 3"
-.TH OSSL_STORE_SEARCH 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OSSL_STORE_SEARCH 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OSSL_STORE_expect.3 b/secure/lib/libcrypto/man/man3/OSSL_STORE_expect.3
index 3a6ae5f85a79c..09f115a014370 100644
--- a/secure/lib/libcrypto/man/man3/OSSL_STORE_expect.3
+++ b/secure/lib/libcrypto/man/man3/OSSL_STORE_expect.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OSSL_STORE_EXPECT 3"
-.TH OSSL_STORE_EXPECT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OSSL_STORE_EXPECT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OSSL_STORE_open.3 b/secure/lib/libcrypto/man/man3/OSSL_STORE_open.3
index b69fe039e4214..a3365e9f074d6 100644
--- a/secure/lib/libcrypto/man/man3/OSSL_STORE_open.3
+++ b/secure/lib/libcrypto/man/man3/OSSL_STORE_open.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OSSL_STORE_OPEN 3"
-.TH OSSL_STORE_OPEN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OSSL_STORE_OPEN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/OpenSSL_add_all_algorithms.3 b/secure/lib/libcrypto/man/man3/OpenSSL_add_all_algorithms.3
index 262ded0944e80..0a4eb0d8b8651 100644
--- a/secure/lib/libcrypto/man/man3/OpenSSL_add_all_algorithms.3
+++ b/secure/lib/libcrypto/man/man3/OpenSSL_add_all_algorithms.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OPENSSL_ADD_ALL_ALGORITHMS 3"
-.TH OPENSSL_ADD_ALL_ALGORITHMS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OPENSSL_ADD_ALL_ALGORITHMS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PEM_bytes_read_bio.3 b/secure/lib/libcrypto/man/man3/PEM_bytes_read_bio.3
index 08eb6eb4bbdc1..a72cbdc2e2c7d 100644
--- a/secure/lib/libcrypto/man/man3/PEM_bytes_read_bio.3
+++ b/secure/lib/libcrypto/man/man3/PEM_bytes_read_bio.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PEM_BYTES_READ_BIO 3"
-.TH PEM_BYTES_READ_BIO 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PEM_BYTES_READ_BIO 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -154,7 +154,8 @@ PEM_bytes_read_bio, PEM_bytes_read_bio_secmem \- read a PEM\-encoded data struct
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
-\&\fBPEM_bytes_read_bio()\fR reads PEM-formatted (\s-1RFC 1421\s0) data from the \s-1BIO\s0
+\&\fBPEM_bytes_read_bio()\fR reads PEM-formatted (\s-1IETF RFC 1421\s0 and \s-1IETF RFC 7468\s0)
+data from the \s-1BIO\s0
\&\fIbp\fR for the data type given in \fIname\fR (\s-1RSA PRIVATE KEY, CERTIFICATE,\s0
etc.). If multiple PEM-encoded data structures are present in the same
stream, \fBPEM_bytes_read_bio()\fR will skip non-matching data types and
@@ -200,7 +201,6 @@ It will simply be treated as a byte sequence.
0 for failure.
.SH "SEE ALSO"
.IX Header "SEE ALSO"
-\&\s-1\fBPEM\s0\fR\|(3),
\&\fBPEM_read_bio_ex\fR\|(3),
\&\fBpassphrase\-encoding\fR\|(7)
.SH "HISTORY"
diff --git a/secure/lib/libcrypto/man/man3/PEM_read.3 b/secure/lib/libcrypto/man/man3/PEM_read.3
index 3b8dbd89ac054..15759afb4a9a2 100644
--- a/secure/lib/libcrypto/man/man3/PEM_read.3
+++ b/secure/lib/libcrypto/man/man3/PEM_read.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PEM_READ 3"
-.TH PEM_READ 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PEM_READ 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PEM_read_CMS.3 b/secure/lib/libcrypto/man/man3/PEM_read_CMS.3
index 40005ae8c6b9d..98fb06a0ca26f 100644
--- a/secure/lib/libcrypto/man/man3/PEM_read_CMS.3
+++ b/secure/lib/libcrypto/man/man3/PEM_read_CMS.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PEM_READ_CMS 3"
-.TH PEM_READ_CMS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PEM_READ_CMS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PEM_read_bio_PrivateKey.3 b/secure/lib/libcrypto/man/man3/PEM_read_bio_PrivateKey.3
index 675bfcce7eb50..640ab6657ab8f 100644
--- a/secure/lib/libcrypto/man/man3/PEM_read_bio_PrivateKey.3
+++ b/secure/lib/libcrypto/man/man3/PEM_read_bio_PrivateKey.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,13 +133,13 @@
.\" ========================================================================
.\"
.IX Title "PEM_READ_BIO_PRIVATEKEY 3"
-.TH PEM_READ_BIO_PRIVATEKEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PEM_READ_BIO_PRIVATEKEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-pem_password_cb, PEM_read_bio_PrivateKey, PEM_read_PrivateKey, PEM_write_bio_PrivateKey, PEM_write_bio_PrivateKey_traditional, PEM_write_PrivateKey, PEM_write_bio_PKCS8PrivateKey, PEM_write_PKCS8PrivateKey, PEM_write_bio_PKCS8PrivateKey_nid, PEM_write_PKCS8PrivateKey_nid, PEM_read_bio_PUBKEY, PEM_read_PUBKEY, PEM_write_bio_PUBKEY, PEM_write_PUBKEY, PEM_read_bio_RSAPrivateKey, PEM_read_RSAPrivateKey, PEM_write_bio_RSAPrivateKey, PEM_write_RSAPrivateKey, PEM_read_bio_RSAPublicKey, PEM_read_RSAPublicKey, PEM_write_bio_RSAPublicKey, PEM_write_RSAPublicKey, PEM_read_bio_RSA_PUBKEY, PEM_read_RSA_PUBKEY, PEM_write_bio_RSA_PUBKEY, PEM_write_RSA_PUBKEY, PEM_read_bio_DSAPrivateKey, PEM_read_DSAPrivateKey, PEM_write_bio_DSAPrivateKey, PEM_write_DSAPrivateKey, PEM_read_bio_DSA_PUBKEY, PEM_read_DSA_PUBKEY, PEM_write_bio_DSA_PUBKEY, PEM_write_DSA_PUBKEY, PEM_read_bio_DSAparams, PEM_read_DSAparams, PEM_write_bio_DSAparams, PEM_write_DSAparams, PEM_read_bio_DHparams, PEM_read_DHparams, PEM_write_bio_DHparams, PEM_write_DHparams, PEM_read_bio_X509, PEM_read_X509, PEM_write_bio_X509, PEM_write_X509, PEM_read_bio_X509_AUX, PEM_read_X509_AUX, PEM_write_bio_X509_AUX, PEM_write_X509_AUX, PEM_read_bio_X509_REQ, PEM_read_X509_REQ, PEM_write_bio_X509_REQ, PEM_write_X509_REQ, PEM_write_bio_X509_REQ_NEW, PEM_write_X509_REQ_NEW, PEM_read_bio_X509_CRL, PEM_read_X509_CRL, PEM_write_bio_X509_CRL, PEM_write_X509_CRL, PEM_read_bio_PKCS7, PEM_read_PKCS7, PEM_write_bio_PKCS7, PEM_write_PKCS7 \- PEM routines
+pem_password_cb, PEM_read_bio_PrivateKey, PEM_read_PrivateKey, PEM_write_bio_PrivateKey, PEM_write_bio_PrivateKey_traditional, PEM_write_PrivateKey, PEM_write_bio_PKCS8PrivateKey, PEM_write_PKCS8PrivateKey, PEM_write_bio_PKCS8PrivateKey_nid, PEM_write_PKCS8PrivateKey_nid, PEM_read_bio_PUBKEY, PEM_read_PUBKEY, PEM_write_bio_PUBKEY, PEM_write_PUBKEY, PEM_read_bio_RSAPrivateKey, PEM_read_RSAPrivateKey, PEM_write_bio_RSAPrivateKey, PEM_write_RSAPrivateKey, PEM_read_bio_RSAPublicKey, PEM_read_RSAPublicKey, PEM_write_bio_RSAPublicKey, PEM_write_RSAPublicKey, PEM_read_bio_RSA_PUBKEY, PEM_read_RSA_PUBKEY, PEM_write_bio_RSA_PUBKEY, PEM_write_RSA_PUBKEY, PEM_read_bio_DSAPrivateKey, PEM_read_DSAPrivateKey, PEM_write_bio_DSAPrivateKey, PEM_write_DSAPrivateKey, PEM_read_bio_DSA_PUBKEY, PEM_read_DSA_PUBKEY, PEM_write_bio_DSA_PUBKEY, PEM_write_DSA_PUBKEY, PEM_read_bio_Parameters, PEM_write_bio_Parameters, PEM_read_bio_DSAparams, PEM_read_DSAparams, PEM_write_bio_DSAparams, PEM_write_DSAparams, PEM_read_bio_DHparams, PEM_read_DHparams, PEM_write_bio_DHparams, PEM_write_DHparams, PEM_read_bio_X509, PEM_read_X509, PEM_write_bio_X509, PEM_write_X509, PEM_read_bio_X509_AUX, PEM_read_X509_AUX, PEM_write_bio_X509_AUX, PEM_write_X509_AUX, PEM_read_bio_X509_REQ, PEM_read_X509_REQ, PEM_write_bio_X509_REQ, PEM_write_X509_REQ, PEM_write_bio_X509_REQ_NEW, PEM_write_X509_REQ_NEW, PEM_read_bio_X509_CRL, PEM_read_X509_CRL, PEM_write_bio_X509_CRL, PEM_write_X509_CRL, PEM_read_bio_PKCS7, PEM_read_PKCS7, PEM_write_bio_PKCS7, PEM_write_PKCS7 \- PEM routines
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
@@ -225,6 +225,9 @@ pem_password_cb, PEM_read_bio_PrivateKey, PEM_read_PrivateKey, PEM_write_bio_Pri
\& int PEM_write_bio_DSA_PUBKEY(BIO *bp, DSA *x);
\& int PEM_write_DSA_PUBKEY(FILE *fp, DSA *x);
\&
+\& EVP_PKEY *PEM_read_bio_Parameters(BIO *bp, EVP_PKEY **x);
+\& int PEM_write_bio_Parameters(BIO *bp, const EVP_PKEY *x);
+\&
\& DSA *PEM_read_bio_DSAparams(BIO *bp, DSA **x, pem_password_cb *cb, void *u);
\& DSA *PEM_read_DSAparams(FILE *fp, DSA **x, pem_password_cb *cb, void *u);
\& int PEM_write_bio_DSAparams(BIO *bp, DSA *x);
@@ -331,6 +334,12 @@ a \s-1DSA\s0 structure. The public key is encoded using a
SubjectPublicKeyInfo structure and an error occurs if the public
key is not \s-1DSA.\s0
.PP
+The \fBParameters\fR functions read or write key parameters in \s-1PEM\s0 format using
+an \s-1EVP_PKEY\s0 structure. The encoding depends on the type of key; for \s-1DSA\s0 key
+parameters, it will be a Dss-Parms structure as defined in \s-1RFC2459,\s0 and for \s-1DH\s0
+key parameters, it will be a PKCS#3 DHparameter structure. \fIThese functions
+only exist for the \f(BI\s-1BIO\s0\fI type\fR.
+.PP
The \fBDSAparams\fR functions process \s-1DSA\s0 parameters using a \s-1DSA\s0
structure. The parameters are encoded using a Dss-Parms structure
as defined in \s-1RFC2459.\s0
diff --git a/secure/lib/libcrypto/man/man3/PEM_read_bio_ex.3 b/secure/lib/libcrypto/man/man3/PEM_read_bio_ex.3
index f70f75118bc0d..79cf4c76e0c42 100644
--- a/secure/lib/libcrypto/man/man3/PEM_read_bio_ex.3
+++ b/secure/lib/libcrypto/man/man3/PEM_read_bio_ex.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PEM_READ_BIO_EX 3"
-.TH PEM_READ_BIO_EX 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PEM_READ_BIO_EX 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -185,7 +185,7 @@ If \s-1PEM_FLAG_SECURE\s0 was set, use \fBOPENSSL_secure_free()\fR; otherwise,
\&\fBPEM_read_bio_ex()\fR returns 1 for success or 0 for failure.
.SH "SEE ALSO"
.IX Header "SEE ALSO"
-\&\s-1\fBPEM\s0\fR\|(3)
+\&\fBPEM_bytes_read_bio\fR\|(3)
.SH "HISTORY"
.IX Header "HISTORY"
The \fBPEM_read_bio_ex()\fR function was added in OpenSSL 1.1.1.
diff --git a/secure/lib/libcrypto/man/man3/PEM_write_bio_CMS_stream.3 b/secure/lib/libcrypto/man/man3/PEM_write_bio_CMS_stream.3
index 19c4dab8d38a3..2782312e982cd 100644
--- a/secure/lib/libcrypto/man/man3/PEM_write_bio_CMS_stream.3
+++ b/secure/lib/libcrypto/man/man3/PEM_write_bio_CMS_stream.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PEM_WRITE_BIO_CMS_STREAM 3"
-.TH PEM_WRITE_BIO_CMS_STREAM 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PEM_WRITE_BIO_CMS_STREAM 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PEM_write_bio_PKCS7_stream.3 b/secure/lib/libcrypto/man/man3/PEM_write_bio_PKCS7_stream.3
index ec4423998ddac..f2706704ee6dd 100644
--- a/secure/lib/libcrypto/man/man3/PEM_write_bio_PKCS7_stream.3
+++ b/secure/lib/libcrypto/man/man3/PEM_write_bio_PKCS7_stream.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PEM_WRITE_BIO_PKCS7_STREAM 3"
-.TH PEM_WRITE_BIO_PKCS7_STREAM 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PEM_WRITE_BIO_PKCS7_STREAM 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PKCS12_create.3 b/secure/lib/libcrypto/man/man3/PKCS12_create.3
index 6bb17b22ac78e..35d8609b854e4 100644
--- a/secure/lib/libcrypto/man/man3/PKCS12_create.3
+++ b/secure/lib/libcrypto/man/man3/PKCS12_create.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PKCS12_CREATE 3"
-.TH PKCS12_CREATE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PKCS12_CREATE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PKCS12_newpass.3 b/secure/lib/libcrypto/man/man3/PKCS12_newpass.3
index ee2f83dab2e1e..5757684a61057 100644
--- a/secure/lib/libcrypto/man/man3/PKCS12_newpass.3
+++ b/secure/lib/libcrypto/man/man3/PKCS12_newpass.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PKCS12_NEWPASS 3"
-.TH PKCS12_NEWPASS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PKCS12_NEWPASS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PKCS12_parse.3 b/secure/lib/libcrypto/man/man3/PKCS12_parse.3
index b4859dffd6fde..03719f6623446 100644
--- a/secure/lib/libcrypto/man/man3/PKCS12_parse.3
+++ b/secure/lib/libcrypto/man/man3/PKCS12_parse.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PKCS12_PARSE 3"
-.TH PKCS12_PARSE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PKCS12_PARSE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PKCS5_PBKDF2_HMAC.3 b/secure/lib/libcrypto/man/man3/PKCS5_PBKDF2_HMAC.3
index 6dad8bbcb7ea3..61a5606c94269 100644
--- a/secure/lib/libcrypto/man/man3/PKCS5_PBKDF2_HMAC.3
+++ b/secure/lib/libcrypto/man/man3/PKCS5_PBKDF2_HMAC.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PKCS5_PBKDF2_HMAC 3"
-.TH PKCS5_PBKDF2_HMAC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PKCS5_PBKDF2_HMAC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PKCS7_decrypt.3 b/secure/lib/libcrypto/man/man3/PKCS7_decrypt.3
index fe851a464d0c1..b8e8b9a5eb873 100644
--- a/secure/lib/libcrypto/man/man3/PKCS7_decrypt.3
+++ b/secure/lib/libcrypto/man/man3/PKCS7_decrypt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PKCS7_DECRYPT 3"
-.TH PKCS7_DECRYPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PKCS7_DECRYPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PKCS7_encrypt.3 b/secure/lib/libcrypto/man/man3/PKCS7_encrypt.3
index 32925ae972bdd..5e103b3938ed6 100644
--- a/secure/lib/libcrypto/man/man3/PKCS7_encrypt.3
+++ b/secure/lib/libcrypto/man/man3/PKCS7_encrypt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PKCS7_ENCRYPT 3"
-.TH PKCS7_ENCRYPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PKCS7_ENCRYPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PKCS7_sign.3 b/secure/lib/libcrypto/man/man3/PKCS7_sign.3
index 68ff3757dc525..16f4302493405 100644
--- a/secure/lib/libcrypto/man/man3/PKCS7_sign.3
+++ b/secure/lib/libcrypto/man/man3/PKCS7_sign.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PKCS7_SIGN 3"
-.TH PKCS7_SIGN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PKCS7_SIGN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PKCS7_sign_add_signer.3 b/secure/lib/libcrypto/man/man3/PKCS7_sign_add_signer.3
index 0383e9379c4a9..e67245282835d 100644
--- a/secure/lib/libcrypto/man/man3/PKCS7_sign_add_signer.3
+++ b/secure/lib/libcrypto/man/man3/PKCS7_sign_add_signer.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PKCS7_SIGN_ADD_SIGNER 3"
-.TH PKCS7_SIGN_ADD_SIGNER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PKCS7_SIGN_ADD_SIGNER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/PKCS7_verify.3 b/secure/lib/libcrypto/man/man3/PKCS7_verify.3
index f4e703cabf27f..df5f4126d331e 100644
--- a/secure/lib/libcrypto/man/man3/PKCS7_verify.3
+++ b/secure/lib/libcrypto/man/man3/PKCS7_verify.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PKCS7_VERIFY 3"
-.TH PKCS7_VERIFY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PKCS7_VERIFY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_DRBG_generate.3 b/secure/lib/libcrypto/man/man3/RAND_DRBG_generate.3
index 5fe6de9269402..7e2f94708ea3d 100644
--- a/secure/lib/libcrypto/man/man3/RAND_DRBG_generate.3
+++ b/secure/lib/libcrypto/man/man3/RAND_DRBG_generate.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_DRBG_GENERATE 3"
-.TH RAND_DRBG_GENERATE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_DRBG_GENERATE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_DRBG_get0_master.3 b/secure/lib/libcrypto/man/man3/RAND_DRBG_get0_master.3
index 06ce851352191..3690faba143a8 100644
--- a/secure/lib/libcrypto/man/man3/RAND_DRBG_get0_master.3
+++ b/secure/lib/libcrypto/man/man3/RAND_DRBG_get0_master.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_DRBG_GET0_MASTER 3"
-.TH RAND_DRBG_GET0_MASTER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_DRBG_GET0_MASTER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_DRBG_new.3 b/secure/lib/libcrypto/man/man3/RAND_DRBG_new.3
index 592206cfd92a6..51bf73c6b8911 100644
--- a/secure/lib/libcrypto/man/man3/RAND_DRBG_new.3
+++ b/secure/lib/libcrypto/man/man3/RAND_DRBG_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_DRBG_NEW 3"
-.TH RAND_DRBG_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_DRBG_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_DRBG_reseed.3 b/secure/lib/libcrypto/man/man3/RAND_DRBG_reseed.3
index 06925afeb2180..13b3bfd9da493 100644
--- a/secure/lib/libcrypto/man/man3/RAND_DRBG_reseed.3
+++ b/secure/lib/libcrypto/man/man3/RAND_DRBG_reseed.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_DRBG_RESEED 3"
-.TH RAND_DRBG_RESEED 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_DRBG_RESEED 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_DRBG_set_callbacks.3 b/secure/lib/libcrypto/man/man3/RAND_DRBG_set_callbacks.3
index 49a765968cebb..a593d99c33209 100644
--- a/secure/lib/libcrypto/man/man3/RAND_DRBG_set_callbacks.3
+++ b/secure/lib/libcrypto/man/man3/RAND_DRBG_set_callbacks.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_DRBG_SET_CALLBACKS 3"
-.TH RAND_DRBG_SET_CALLBACKS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_DRBG_SET_CALLBACKS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_DRBG_set_ex_data.3 b/secure/lib/libcrypto/man/man3/RAND_DRBG_set_ex_data.3
index cd94b4a2af1b5..db2116b13c92c 100644
--- a/secure/lib/libcrypto/man/man3/RAND_DRBG_set_ex_data.3
+++ b/secure/lib/libcrypto/man/man3/RAND_DRBG_set_ex_data.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_DRBG_SET_EX_DATA 3"
-.TH RAND_DRBG_SET_EX_DATA 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_DRBG_SET_EX_DATA 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_add.3 b/secure/lib/libcrypto/man/man3/RAND_add.3
index d7d2cd0a64363..983cdf701acc4 100644
--- a/secure/lib/libcrypto/man/man3/RAND_add.3
+++ b/secure/lib/libcrypto/man/man3/RAND_add.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_ADD 3"
-.TH RAND_ADD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_ADD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_bytes.3 b/secure/lib/libcrypto/man/man3/RAND_bytes.3
index a90138882f5de..5863c381a42d5 100644
--- a/secure/lib/libcrypto/man/man3/RAND_bytes.3
+++ b/secure/lib/libcrypto/man/man3/RAND_bytes.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_BYTES 3"
-.TH RAND_BYTES 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_BYTES 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -158,8 +158,8 @@ Deprecated:
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
-\&\fBRAND_bytes()\fR puts \fBnum\fR cryptographically strong pseudo-random bytes
-into \fBbuf\fR.
+\&\fBRAND_bytes()\fR generates \fBnum\fR random bytes using a cryptographically
+secure pseudo random generator (\s-1CSPRNG\s0) and stores them in \fBbuf\fR.
.PP
\&\fBRAND_priv_bytes()\fR has the same semantics as \fBRAND_bytes()\fR. It is intended to
be used for generating values that should remain private. If using the
@@ -169,10 +169,22 @@ affect the secrecy of these private values, as described in \s-1\fBRAND\s0\fR\|(
and \s-1\fBRAND_DRBG\s0\fR\|(7).
.SH "NOTES"
.IX Header "NOTES"
-Always check the error return value of \fBRAND_bytes()\fR and
-\&\fBRAND_priv_bytes()\fR and do not take randomness for granted: an error occurs
-if the \s-1CSPRNG\s0 has not been seeded with enough randomness to ensure an
-unpredictable byte sequence.
+By default, the OpenSSL \s-1CSPRNG\s0 supports a security level of 256 bits, provided it
+was able to seed itself from a trusted entropy source.
+On all major platforms supported by OpenSSL (including the Unix-like platforms
+and Windows), OpenSSL is configured to automatically seed the \s-1CSPRNG\s0 on first use
+using the operating systems's random generator.
+.PP
+If the entropy source fails or is not available, the \s-1CSPRNG\s0 will enter an
+error state and refuse to generate random bytes. For that reason, it is important
+to always check the error return value of \fBRAND_bytes()\fR and \fBRAND_priv_bytes()\fR and
+not take randomness for granted.
+.PP
+On other platforms, there might not be a trusted entropy source available
+or OpenSSL might have been explicitly configured to use different entropy sources.
+If you are in doubt about the quality of the entropy source, don't hesitate to ask
+your operating system vendor or post a question on GitHub or the openssl-users
+mailing list.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
\&\fBRAND_bytes()\fR and \fBRAND_priv_bytes()\fR
@@ -195,7 +207,7 @@ obtained by \fBERR_get_error\fR\|(3).
The \fBRAND_priv_bytes()\fR function was added in OpenSSL 1.1.1.
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2000\-2019 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2000\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/RAND_cleanup.3 b/secure/lib/libcrypto/man/man3/RAND_cleanup.3
index 8a4c7f7d035f1..ef0e1441bbe71 100644
--- a/secure/lib/libcrypto/man/man3/RAND_cleanup.3
+++ b/secure/lib/libcrypto/man/man3/RAND_cleanup.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_CLEANUP 3"
-.TH RAND_CLEANUP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_CLEANUP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_egd.3 b/secure/lib/libcrypto/man/man3/RAND_egd.3
index 17cc7cee6732a..2138e9ab529ae 100644
--- a/secure/lib/libcrypto/man/man3/RAND_egd.3
+++ b/secure/lib/libcrypto/man/man3/RAND_egd.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_EGD 3"
-.TH RAND_EGD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_EGD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_load_file.3 b/secure/lib/libcrypto/man/man3/RAND_load_file.3
index 07bf78761f4ad..f0bafbe08742a 100644
--- a/secure/lib/libcrypto/man/man3/RAND_load_file.3
+++ b/secure/lib/libcrypto/man/man3/RAND_load_file.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_LOAD_FILE 3"
-.TH RAND_LOAD_FILE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_LOAD_FILE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RAND_set_rand_method.3 b/secure/lib/libcrypto/man/man3/RAND_set_rand_method.3
index e9c414f8e7da1..b8d291d8ff8b1 100644
--- a/secure/lib/libcrypto/man/man3/RAND_set_rand_method.3
+++ b/secure/lib/libcrypto/man/man3/RAND_set_rand_method.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_SET_RAND_METHOD 3"
-.TH RAND_SET_RAND_METHOD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_SET_RAND_METHOD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -185,7 +185,7 @@ and \fBRAND_status()\fR.
Each pointer may be \s-1NULL\s0 if the function is not implemented.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
-\&\fBRAND_set_rand_method()\fR returns 1 on success and 0 on failue.
+\&\fBRAND_set_rand_method()\fR returns 1 on success and 0 on failure.
\&\fBRAND_get_rand_method()\fR and \fBRAND_OpenSSL()\fR return pointers to the respective
methods.
.SH "SEE ALSO"
diff --git a/secure/lib/libcrypto/man/man3/RC4_set_key.3 b/secure/lib/libcrypto/man/man3/RC4_set_key.3
index e2ed19a87ec65..b3d97b5ffcace 100644
--- a/secure/lib/libcrypto/man/man3/RC4_set_key.3
+++ b/secure/lib/libcrypto/man/man3/RC4_set_key.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RC4_SET_KEY 3"
-.TH RC4_SET_KEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RC4_SET_KEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RIPEMD160_Init.3 b/secure/lib/libcrypto/man/man3/RIPEMD160_Init.3
index 1de93985960bf..dac90623b635b 100644
--- a/secure/lib/libcrypto/man/man3/RIPEMD160_Init.3
+++ b/secure/lib/libcrypto/man/man3/RIPEMD160_Init.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RIPEMD160_INIT 3"
-.TH RIPEMD160_INIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RIPEMD160_INIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_blinding_on.3 b/secure/lib/libcrypto/man/man3/RSA_blinding_on.3
index eb950af7ce6bb..fe4bc6504bbb1 100644
--- a/secure/lib/libcrypto/man/man3/RSA_blinding_on.3
+++ b/secure/lib/libcrypto/man/man3/RSA_blinding_on.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_BLINDING_ON 3"
-.TH RSA_BLINDING_ON 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_BLINDING_ON 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_check_key.3 b/secure/lib/libcrypto/man/man3/RSA_check_key.3
index 1044c5dcc1901..7a76153b678d0 100644
--- a/secure/lib/libcrypto/man/man3/RSA_check_key.3
+++ b/secure/lib/libcrypto/man/man3/RSA_check_key.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_CHECK_KEY 3"
-.TH RSA_CHECK_KEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_CHECK_KEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_generate_key.3 b/secure/lib/libcrypto/man/man3/RSA_generate_key.3
index 5abf9b196a708..bee7b7bc89f89 100644
--- a/secure/lib/libcrypto/man/man3/RSA_generate_key.3
+++ b/secure/lib/libcrypto/man/man3/RSA_generate_key.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_GENERATE_KEY 3"
-.TH RSA_GENERATE_KEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_GENERATE_KEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_get0_key.3 b/secure/lib/libcrypto/man/man3/RSA_get0_key.3
index 036edf25a1389..471d64a654013 100644
--- a/secure/lib/libcrypto/man/man3/RSA_get0_key.3
+++ b/secure/lib/libcrypto/man/man3/RSA_get0_key.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,13 +133,13 @@
.\" ========================================================================
.\"
.IX Title "RSA_GET0_KEY 3"
-.TH RSA_GET0_KEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_GET0_KEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-RSA_set0_key, RSA_set0_factors, RSA_set0_crt_params, RSA_get0_key, RSA_get0_factors, RSA_get0_crt_params, RSA_get0_n, RSA_get0_e, RSA_get0_d, RSA_get0_p, RSA_get0_q, RSA_get0_dmp1, RSA_get0_dmq1, RSA_get0_iqmp, RSA_clear_flags, RSA_test_flags, RSA_set_flags, RSA_get0_engine, RSA_get_multi_prime_extra_count, RSA_get0_multi_prime_factors, RSA_get0_multi_prime_crt_params, RSA_set0_multi_prime_params, RSA_get_version \&\- Routines for getting and setting data in an RSA object
+RSA_set0_key, RSA_set0_factors, RSA_set0_crt_params, RSA_get0_key, RSA_get0_factors, RSA_get0_crt_params, RSA_get0_n, RSA_get0_e, RSA_get0_d, RSA_get0_p, RSA_get0_q, RSA_get0_dmp1, RSA_get0_dmq1, RSA_get0_iqmp, RSA_get0_pss_params, RSA_clear_flags, RSA_test_flags, RSA_set_flags, RSA_get0_engine, RSA_get_multi_prime_extra_count, RSA_get0_multi_prime_factors, RSA_get0_multi_prime_crt_params, RSA_set0_multi_prime_params, RSA_get_version \&\- Routines for getting and setting data in an RSA object
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
@@ -162,6 +162,7 @@ RSA_set0_key, RSA_set0_factors, RSA_set0_crt_params, RSA_get0_key, RSA_get0_fact
\& const BIGNUM *RSA_get0_dmp1(const RSA *r);
\& const BIGNUM *RSA_get0_dmq1(const RSA *r);
\& const BIGNUM *RSA_get0_iqmp(const RSA *r);
+\& const RSA_PSS_PARAMS *RSA_get0_pss_params(const RSA *r);
\& void RSA_clear_flags(RSA *r, int flags);
\& int RSA_test_flags(const RSA *r, int flags);
\& void RSA_set_flags(RSA *r, int flags);
@@ -227,6 +228,8 @@ retrieved separately by the corresponding function
\&\fBRSA_get0_n()\fR, \fBRSA_get0_e()\fR, \fBRSA_get0_d()\fR, \fBRSA_get0_p()\fR, \fBRSA_get0_q()\fR,
\&\fBRSA_get0_dmp1()\fR, \fBRSA_get0_dmq1()\fR, and \fBRSA_get0_iqmp()\fR, respectively.
.PP
+\&\fBRSA_get0_pss_params()\fR is used to retrieve the RSA-PSS parameters.
+.PP
\&\fBRSA_set_flags()\fR sets the flags in the \fBflags\fR parameter on the \s-1RSA\s0
object. Multiple flags can be passed in one go (bitwise ORed together).
Any flags that are already set are left set. \fBRSA_test_flags()\fR tests to
@@ -282,6 +285,8 @@ multi-prime \s-1RSA.\s0
\&\fBRSA_new\fR\|(3), \fBRSA_size\fR\|(3)
.SH "HISTORY"
.IX Header "HISTORY"
+The \fBRSA_get0_pss_params()\fR function was added in OpenSSL 1.1.1e.
+.PP
The
\&\fBRSA_get_multi_prime_extra_count()\fR, \fBRSA_get0_multi_prime_factors()\fR,
\&\fBRSA_get0_multi_prime_crt_params()\fR, \fBRSA_set0_multi_prime_params()\fR,
diff --git a/secure/lib/libcrypto/man/man3/RSA_meth_new.3 b/secure/lib/libcrypto/man/man3/RSA_meth_new.3
index 78f324356590d..057fa606d621f 100644
--- a/secure/lib/libcrypto/man/man3/RSA_meth_new.3
+++ b/secure/lib/libcrypto/man/man3/RSA_meth_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_METH_NEW 3"
-.TH RSA_METH_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_METH_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_new.3 b/secure/lib/libcrypto/man/man3/RSA_new.3
index fe37bcc9d2fe6..985af87ddaad8 100644
--- a/secure/lib/libcrypto/man/man3/RSA_new.3
+++ b/secure/lib/libcrypto/man/man3/RSA_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_NEW 3"
-.TH RSA_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_padding_add_PKCS1_type_1.3 b/secure/lib/libcrypto/man/man3/RSA_padding_add_PKCS1_type_1.3
index 419fe86f04450..858bd3e159308 100644
--- a/secure/lib/libcrypto/man/man3/RSA_padding_add_PKCS1_type_1.3
+++ b/secure/lib/libcrypto/man/man3/RSA_padding_add_PKCS1_type_1.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_PADDING_ADD_PKCS1_TYPE_1 3"
-.TH RSA_PADDING_ADD_PKCS1_TYPE_1 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_PADDING_ADD_PKCS1_TYPE_1 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_print.3 b/secure/lib/libcrypto/man/man3/RSA_print.3
index cd740a5a73cdb..f479d3729738e 100644
--- a/secure/lib/libcrypto/man/man3/RSA_print.3
+++ b/secure/lib/libcrypto/man/man3/RSA_print.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_PRINT 3"
-.TH RSA_PRINT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_PRINT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_private_encrypt.3 b/secure/lib/libcrypto/man/man3/RSA_private_encrypt.3
index 9fb76a68059a9..2364538ba5f11 100644
--- a/secure/lib/libcrypto/man/man3/RSA_private_encrypt.3
+++ b/secure/lib/libcrypto/man/man3/RSA_private_encrypt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_PRIVATE_ENCRYPT 3"
-.TH RSA_PRIVATE_ENCRYPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_PRIVATE_ENCRYPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_public_encrypt.3 b/secure/lib/libcrypto/man/man3/RSA_public_encrypt.3
index a58a81bea2897..a96e0411ba2d3 100644
--- a/secure/lib/libcrypto/man/man3/RSA_public_encrypt.3
+++ b/secure/lib/libcrypto/man/man3/RSA_public_encrypt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_PUBLIC_ENCRYPT 3"
-.TH RSA_PUBLIC_ENCRYPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_PUBLIC_ENCRYPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_set_method.3 b/secure/lib/libcrypto/man/man3/RSA_set_method.3
index 5a87cc5e1610f..574db2811644e 100644
--- a/secure/lib/libcrypto/man/man3/RSA_set_method.3
+++ b/secure/lib/libcrypto/man/man3/RSA_set_method.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_SET_METHOD 3"
-.TH RSA_SET_METHOD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_SET_METHOD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_sign.3 b/secure/lib/libcrypto/man/man3/RSA_sign.3
index edb5431df9736..89af93275da77 100644
--- a/secure/lib/libcrypto/man/man3/RSA_sign.3
+++ b/secure/lib/libcrypto/man/man3/RSA_sign.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_SIGN 3"
-.TH RSA_SIGN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_SIGN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_sign_ASN1_OCTET_STRING.3 b/secure/lib/libcrypto/man/man3/RSA_sign_ASN1_OCTET_STRING.3
index e4c5e8b12e668..759b88ab50d1c 100644
--- a/secure/lib/libcrypto/man/man3/RSA_sign_ASN1_OCTET_STRING.3
+++ b/secure/lib/libcrypto/man/man3/RSA_sign_ASN1_OCTET_STRING.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_SIGN_ASN1_OCTET_STRING 3"
-.TH RSA_SIGN_ASN1_OCTET_STRING 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_SIGN_ASN1_OCTET_STRING 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/RSA_size.3 b/secure/lib/libcrypto/man/man3/RSA_size.3
index 023b799d556bf..c35224a7de941 100644
--- a/secure/lib/libcrypto/man/man3/RSA_size.3
+++ b/secure/lib/libcrypto/man/man3/RSA_size.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA_SIZE 3"
-.TH RSA_SIZE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA_SIZE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SCT_new.3 b/secure/lib/libcrypto/man/man3/SCT_new.3
index 268e489a840c7..f7cfc256dd706 100644
--- a/secure/lib/libcrypto/man/man3/SCT_new.3
+++ b/secure/lib/libcrypto/man/man3/SCT_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SCT_NEW 3"
-.TH SCT_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SCT_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SCT_print.3 b/secure/lib/libcrypto/man/man3/SCT_print.3
index 9fba55dcd2cae..3aaf167c8d187 100644
--- a/secure/lib/libcrypto/man/man3/SCT_print.3
+++ b/secure/lib/libcrypto/man/man3/SCT_print.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SCT_PRINT 3"
-.TH SCT_PRINT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SCT_PRINT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SCT_validate.3 b/secure/lib/libcrypto/man/man3/SCT_validate.3
index 44c7608323420..0d9e348e39bb2 100644
--- a/secure/lib/libcrypto/man/man3/SCT_validate.3
+++ b/secure/lib/libcrypto/man/man3/SCT_validate.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SCT_VALIDATE 3"
-.TH SCT_VALIDATE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SCT_VALIDATE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SHA256_Init.3 b/secure/lib/libcrypto/man/man3/SHA256_Init.3
index 8e07cce30a807..795a742e60255 100644
--- a/secure/lib/libcrypto/man/man3/SHA256_Init.3
+++ b/secure/lib/libcrypto/man/man3/SHA256_Init.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SHA256_INIT 3"
-.TH SHA256_INIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SHA256_INIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SMIME_read_CMS.3 b/secure/lib/libcrypto/man/man3/SMIME_read_CMS.3
index 3dc53482cd07b..de7e31e6e6abc 100644
--- a/secure/lib/libcrypto/man/man3/SMIME_read_CMS.3
+++ b/secure/lib/libcrypto/man/man3/SMIME_read_CMS.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SMIME_READ_CMS 3"
-.TH SMIME_READ_CMS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SMIME_READ_CMS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SMIME_read_PKCS7.3 b/secure/lib/libcrypto/man/man3/SMIME_read_PKCS7.3
index ded4ed00e6996..47e4f4376b6b1 100644
--- a/secure/lib/libcrypto/man/man3/SMIME_read_PKCS7.3
+++ b/secure/lib/libcrypto/man/man3/SMIME_read_PKCS7.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SMIME_READ_PKCS7 3"
-.TH SMIME_READ_PKCS7 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SMIME_READ_PKCS7 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SMIME_write_CMS.3 b/secure/lib/libcrypto/man/man3/SMIME_write_CMS.3
index 2a93267c4d92a..898557d2f4929 100644
--- a/secure/lib/libcrypto/man/man3/SMIME_write_CMS.3
+++ b/secure/lib/libcrypto/man/man3/SMIME_write_CMS.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SMIME_WRITE_CMS 3"
-.TH SMIME_WRITE_CMS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SMIME_WRITE_CMS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SMIME_write_PKCS7.3 b/secure/lib/libcrypto/man/man3/SMIME_write_PKCS7.3
index 0d88ddd007b57..1df950d4fd9d7 100644
--- a/secure/lib/libcrypto/man/man3/SMIME_write_PKCS7.3
+++ b/secure/lib/libcrypto/man/man3/SMIME_write_PKCS7.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SMIME_WRITE_PKCS7 3"
-.TH SMIME_WRITE_PKCS7 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SMIME_WRITE_PKCS7 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CIPHER_get_name.3 b/secure/lib/libcrypto/man/man3/SSL_CIPHER_get_name.3
index e82e668e4df85..d08215f0d8280 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CIPHER_get_name.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CIPHER_get_name.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CIPHER_GET_NAME 3"
-.TH SSL_CIPHER_GET_NAME 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CIPHER_GET_NAME 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_COMP_add_compression_method.3 b/secure/lib/libcrypto/man/man3/SSL_COMP_add_compression_method.3
index 5a2b59079f24f..05fda313ae93d 100644
--- a/secure/lib/libcrypto/man/man3/SSL_COMP_add_compression_method.3
+++ b/secure/lib/libcrypto/man/man3/SSL_COMP_add_compression_method.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_COMP_ADD_COMPRESSION_METHOD 3"
-.TH SSL_COMP_ADD_COMPRESSION_METHOD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_COMP_ADD_COMPRESSION_METHOD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_new.3 b/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_new.3
index fb4b99400d353..60f8d1746cd99 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_new.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CONF_CTX_NEW 3"
-.TH SSL_CONF_CTX_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CONF_CTX_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set1_prefix.3 b/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set1_prefix.3
index df9298ca67506..bf2cc951660ef 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set1_prefix.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set1_prefix.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CONF_CTX_SET1_PREFIX 3"
-.TH SSL_CONF_CTX_SET1_PREFIX 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CONF_CTX_SET1_PREFIX 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set_flags.3 b/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set_flags.3
index 07db6d80339a8..c9d034800326f 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set_flags.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set_flags.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CONF_CTX_SET_FLAGS 3"
-.TH SSL_CONF_CTX_SET_FLAGS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CONF_CTX_SET_FLAGS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set_ssl_ctx.3 b/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set_ssl_ctx.3
index c35de3599c944..d8af51e4dd2d4 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set_ssl_ctx.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CONF_CTX_set_ssl_ctx.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CONF_CTX_SET_SSL_CTX 3"
-.TH SSL_CONF_CTX_SET_SSL_CTX 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CONF_CTX_SET_SSL_CTX 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CONF_cmd.3 b/secure/lib/libcrypto/man/man3/SSL_CONF_cmd.3
index 928984d31b7ce..6ac9ec14e44ef 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CONF_cmd.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CONF_cmd.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CONF_CMD 3"
-.TH SSL_CONF_CMD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CONF_CMD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CONF_cmd_argv.3 b/secure/lib/libcrypto/man/man3/SSL_CONF_cmd_argv.3
index 8df0077f8f7ca..038a1c21a4fa6 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CONF_cmd_argv.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CONF_cmd_argv.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CONF_CMD_ARGV 3"
-.TH SSL_CONF_CMD_ARGV 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CONF_CMD_ARGV 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_add1_chain_cert.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_add1_chain_cert.3
index 824827113bd4f..d563156d297fd 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_add1_chain_cert.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_add1_chain_cert.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_ADD1_CHAIN_CERT 3"
-.TH SSL_CTX_ADD1_CHAIN_CERT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_ADD1_CHAIN_CERT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_add_extra_chain_cert.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_add_extra_chain_cert.3
index 3466d37a64b34..83264f6ed45d2 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_add_extra_chain_cert.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_add_extra_chain_cert.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_ADD_EXTRA_CHAIN_CERT 3"
-.TH SSL_CTX_ADD_EXTRA_CHAIN_CERT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_ADD_EXTRA_CHAIN_CERT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_add_session.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_add_session.3
index 961d612c48956..3dc922af87b23 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_add_session.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_add_session.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_ADD_SESSION 3"
-.TH SSL_CTX_ADD_SESSION 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_ADD_SESSION 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_config.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_config.3
index b66d54e99729e..012b55dd1e7cd 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_config.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_config.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_CONFIG 3"
-.TH SSL_CTX_CONFIG 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_CONFIG 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_ctrl.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_ctrl.3
index be101b014709b..dfa0c5d307574 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_ctrl.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_ctrl.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_CTRL 3"
-.TH SSL_CTX_CTRL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_CTRL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_dane_enable.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_dane_enable.3
index 230333d55cd94..901d6bc906e9c 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_dane_enable.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_dane_enable.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_DANE_ENABLE 3"
-.TH SSL_CTX_DANE_ENABLE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_DANE_ENABLE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_flush_sessions.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_flush_sessions.3
index 93c857189752d..a1b0e03cd6941 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_flush_sessions.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_flush_sessions.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_FLUSH_SESSIONS 3"
-.TH SSL_CTX_FLUSH_SESSIONS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_FLUSH_SESSIONS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_free.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_free.3
index a9ede4208d0ac..62e47d31bb09b 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_free.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_free.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_FREE 3"
-.TH SSL_CTX_FREE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_FREE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_get0_param.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_get0_param.3
index 0075c6a566b74..f983aa46f2c3d 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_get0_param.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_get0_param.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_GET0_PARAM 3"
-.TH SSL_CTX_GET0_PARAM 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_GET0_PARAM 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_get_verify_mode.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_get_verify_mode.3
index 14b776021b9c0..7f689472176e5 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_get_verify_mode.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_get_verify_mode.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_GET_VERIFY_MODE 3"
-.TH SSL_CTX_GET_VERIFY_MODE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_GET_VERIFY_MODE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_has_client_custom_ext.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_has_client_custom_ext.3
index bc89d874942f5..c36658e8647b9 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_has_client_custom_ext.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_has_client_custom_ext.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_HAS_CLIENT_CUSTOM_EXT 3"
-.TH SSL_CTX_HAS_CLIENT_CUSTOM_EXT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_HAS_CLIENT_CUSTOM_EXT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_load_verify_locations.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_load_verify_locations.3
index 47c67cc2f572c..1a427f8c2f3d5 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_load_verify_locations.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_load_verify_locations.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_LOAD_VERIFY_LOCATIONS 3"
-.TH SSL_CTX_LOAD_VERIFY_LOCATIONS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_LOAD_VERIFY_LOCATIONS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_new.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_new.3
index 6126859c8104e..c645022c43cfd 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_new.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_NEW 3"
-.TH SSL_CTX_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_sess_number.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_sess_number.3
index be8aa4da19a28..a0cb9e5cf71e2 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_sess_number.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_sess_number.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SESS_NUMBER 3"
-.TH SSL_CTX_SESS_NUMBER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SESS_NUMBER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_sess_set_cache_size.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_sess_set_cache_size.3
index 54da3301e959e..00a15b2ddf1e4 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_sess_set_cache_size.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_sess_set_cache_size.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SESS_SET_CACHE_SIZE 3"
-.TH SSL_CTX_SESS_SET_CACHE_SIZE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SESS_SET_CACHE_SIZE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_sess_set_get_cb.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_sess_set_get_cb.3
index 6e612b67cbfa2..10309839821e6 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_sess_set_get_cb.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_sess_set_get_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SESS_SET_GET_CB 3"
-.TH SSL_CTX_SESS_SET_GET_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SESS_SET_GET_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -165,19 +165,19 @@ SSL_CTX_sess_set_new_cb, SSL_CTX_sess_set_remove_cb, SSL_CTX_sess_set_get_cb, SS
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
-\&\fBSSL_CTX_sess_set_new_cb()\fR sets the callback function, which is automatically
+\&\fBSSL_CTX_sess_set_new_cb()\fR sets the callback function that is
called whenever a new session was negotiated.
.PP
-\&\fBSSL_CTX_sess_set_remove_cb()\fR sets the callback function, which is
-automatically called whenever a session is removed by the \s-1SSL\s0 engine,
-because it is considered faulty or the session has become obsolete because
-of exceeding the timeout value.
+\&\fBSSL_CTX_sess_set_remove_cb()\fR sets the callback function that is
+called whenever a session is removed by the \s-1SSL\s0 engine. For example,
+this can occur because a session is considered faulty or has become obsolete
+because of exceeding the timeout value.
.PP
-\&\fBSSL_CTX_sess_set_get_cb()\fR sets the callback function which is called,
-whenever a \s-1SSL/TLS\s0 client proposed to resume a session but the session
+\&\fBSSL_CTX_sess_set_get_cb()\fR sets the callback function that is called
+whenever a \s-1TLS\s0 client proposed to resume a session but the session
could not be found in the internal session cache (see
\&\fBSSL_CTX_set_session_cache_mode\fR\|(3)).
-(\s-1SSL/TLS\s0 server only.)
+(\s-1TLS\s0 server only.)
.PP
\&\fBSSL_CTX_sess_get_new_cb()\fR, \fBSSL_CTX_sess_get_remove_cb()\fR, and
\&\fBSSL_CTX_sess_get_get_cb()\fR retrieve the function pointers set by the
@@ -190,12 +190,19 @@ session cache is realized via callback functions. Inside these callback
functions, session can be saved to disk or put into a database using the
\&\fBd2i_SSL_SESSION\fR\|(3) interface.
.PP
-The \fBnew_session_cb()\fR is called, whenever a new session has been negotiated
-and session caching is enabled (see
-\&\fBSSL_CTX_set_session_cache_mode\fR\|(3)).
-The \fBnew_session_cb()\fR is passed the \fBssl\fR connection and the ssl session
-\&\fBsess\fR. If the callback returns \fB0\fR, the session will be immediately
-removed again. Note that in TLSv1.3, sessions are established after the main
+The \fBnew_session_cb()\fR is called whenever a new session has been negotiated and
+session caching is enabled (see \fBSSL_CTX_set_session_cache_mode\fR\|(3)). The
+\&\fBnew_session_cb()\fR is passed the \fBssl\fR connection and the nascent
+ssl session \fBsess\fR.
+Since sessions are reference-counted objects, the reference count on the
+session is incremented before the callback, on behalf of the application. If
+the callback returns \fB0\fR, the session will be immediately removed from the
+internal cache and the reference count released. If the callback returns \fB1\fR,
+the application retains the reference (for an entry in the
+application-maintained \*(L"external session cache\*(R"), and is responsible for
+calling \fBSSL_SESSION_free()\fR when the session reference is no longer in use.
+.PP
+Note that in TLSv1.3, sessions are established after the main
handshake has completed. The server decides when to send the client the session
information and this may occur some time after the end of the handshake (or not
at all). This means that applications should expect the \fBnew_session_cb()\fR
@@ -208,21 +215,23 @@ In TLSv1.3 it is recommended that each \s-1SSL_SESSION\s0 object is only used fo
resumption once. One way of enforcing that is for applications to call
\&\fBSSL_CTX_remove_session\fR\|(3) after a session has been used.
.PP
-The \fBremove_session_cb()\fR is called, whenever the \s-1SSL\s0 engine removes a session
-from the internal cache. This happens when the session is removed because
+The \fBremove_session_cb()\fR is called whenever the \s-1SSL\s0 engine removes a session
+from the internal cache. This can happen when the session is removed because
it is expired or when a connection was not shutdown cleanly. It also happens
for all sessions in the internal session cache when
\&\fBSSL_CTX_free\fR\|(3) is called. The \fBremove_session_cb()\fR is passed
the \fBctx\fR and the ssl session \fBsess\fR. It does not provide any feedback.
.PP
-The \fBget_session_cb()\fR is only called on \s-1SSL/TLS\s0 servers with the session id
-proposed by the client. The \fBget_session_cb()\fR is always called, also when
+The \fBget_session_cb()\fR is only called on \s-1SSL/TLS\s0 servers, and is given
+the session id
+proposed by the client. The \fBget_session_cb()\fR is always called, even when
session caching was disabled. The \fBget_session_cb()\fR is passed the
-\&\fBssl\fR connection, the session id of length \fBlength\fR at the memory location
-\&\fBdata\fR. With the parameter \fBcopy\fR the callback can require the
-\&\s-1SSL\s0 engine to increment the reference count of the \s-1SSL_SESSION\s0 object,
-Normally the reference count is not incremented and therefore the
-session must not be explicitly freed with
+\&\fBssl\fR connection and the session id of length \fBlength\fR at the memory location
+\&\fBdata\fR. By setting the parameter \fBcopy\fR to \fB1\fR, the callback can require the
+\&\s-1SSL\s0 engine to increment the reference count of the \s-1SSL_SESSION\s0 object;
+setting \fBcopy\fR to \fB0\fR causes the reference count to remain unchanged.
+If the \fBget_session_cb()\fR does not write to \fBcopy\fR, the reference count
+is incremented and the session must be explicitly freed with
\&\fBSSL_SESSION_free\fR\|(3).
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
@@ -237,7 +246,7 @@ return different callback function pointers respectively.
\&\fBSSL_CTX_free\fR\|(3)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2001\-2018 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2001\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_sessions.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_sessions.3
index d20de0f09b21d..72251dd2f8815 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_sessions.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_sessions.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SESSIONS 3"
-.TH SSL_CTX_SESSIONS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SESSIONS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set0_CA_list.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set0_CA_list.3
index f123d0a00359a..314aba61ae989 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set0_CA_list.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set0_CA_list.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET0_CA_LIST 3"
-.TH SSL_CTX_SET0_CA_LIST 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET0_CA_LIST 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set1_curves.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set1_curves.3
index 7999810c736d9..def326added70 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set1_curves.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set1_curves.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET1_CURVES 3"
-.TH SSL_CTX_SET1_CURVES 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET1_CURVES 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set1_sigalgs.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set1_sigalgs.3
index 9dd61cd9bc974..69ae8a494f112 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set1_sigalgs.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set1_sigalgs.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET1_SIGALGS 3"
-.TH SSL_CTX_SET1_SIGALGS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET1_SIGALGS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set1_verify_cert_store.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set1_verify_cert_store.3
index ecae59de70f0c..03db0313f45d8 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set1_verify_cert_store.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set1_verify_cert_store.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET1_VERIFY_CERT_STORE 3"
-.TH SSL_CTX_SET1_VERIFY_CERT_STORE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET1_VERIFY_CERT_STORE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_alpn_select_cb.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_alpn_select_cb.3
index 6bfbd20db37d5..8ab8a7650db9a 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_alpn_select_cb.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_alpn_select_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_ALPN_SELECT_CB 3"
-.TH SSL_CTX_SET_ALPN_SELECT_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_ALPN_SELECT_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_cb.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_cb.3
index b2cee6af31bb4..6be3bcd569c7c 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_cb.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_CERT_CB 3"
-.TH SSL_CTX_SET_CERT_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_CERT_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_store.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_store.3
index 50afa6b0a815d..be0defc3701ae 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_store.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_store.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_CERT_STORE 3"
-.TH SSL_CTX_SET_CERT_STORE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_CERT_STORE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_verify_callback.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_verify_callback.3
index 79339b0eb09bf..2bb44054eea65 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_verify_callback.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_cert_verify_callback.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_CERT_VERIFY_CALLBACK 3"
-.TH SSL_CTX_SET_CERT_VERIFY_CALLBACK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_CERT_VERIFY_CALLBACK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_cipher_list.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_cipher_list.3
index 8d43da2e06fbf..df55cfff25659 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_cipher_list.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_cipher_list.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_CIPHER_LIST 3"
-.TH SSL_CTX_SET_CIPHER_LIST 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_CIPHER_LIST 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_client_cert_cb.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_client_cert_cb.3
index a91cc5e9af9d5..92a7a740801e6 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_client_cert_cb.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_client_cert_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_CLIENT_CERT_CB 3"
-.TH SSL_CTX_SET_CLIENT_CERT_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_CLIENT_CERT_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_client_hello_cb.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_client_hello_cb.3
index 1a2b18a81c7cc..77d590541f660 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_client_hello_cb.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_client_hello_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_CLIENT_HELLO_CB 3"
-.TH SSL_CTX_SET_CLIENT_HELLO_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_CLIENT_HELLO_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_ct_validation_callback.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_ct_validation_callback.3
index e0fdf43e2cd33..69ee24ed33a74 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_ct_validation_callback.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_ct_validation_callback.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_CT_VALIDATION_CALLBACK 3"
-.TH SSL_CTX_SET_CT_VALIDATION_CALLBACK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_CT_VALIDATION_CALLBACK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_ctlog_list_file.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_ctlog_list_file.3
index 3bdd74ecebc07..b6b68721ed3e4 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_ctlog_list_file.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_ctlog_list_file.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_CTLOG_LIST_FILE 3"
-.TH SSL_CTX_SET_CTLOG_LIST_FILE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_CTLOG_LIST_FILE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_default_passwd_cb.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_default_passwd_cb.3
index 3485cdb6edc1a..6d29c03b2c7ec 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_default_passwd_cb.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_default_passwd_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_DEFAULT_PASSWD_CB 3"
-.TH SSL_CTX_SET_DEFAULT_PASSWD_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_DEFAULT_PASSWD_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_ex_data.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_ex_data.3
index 5985e6665802f..d63d993aa0a5e 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_ex_data.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_ex_data.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_EX_DATA 3"
-.TH SSL_CTX_SET_EX_DATA 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_EX_DATA 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_generate_session_id.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_generate_session_id.3
index f8396c00f5cb6..86c43be24493d 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_generate_session_id.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_generate_session_id.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_GENERATE_SESSION_ID 3"
-.TH SSL_CTX_SET_GENERATE_SESSION_ID 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_GENERATE_SESSION_ID 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_info_callback.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_info_callback.3
index e435ea52e379e..f43d79409280c 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_info_callback.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_info_callback.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_INFO_CALLBACK 3"
-.TH SSL_CTX_SET_INFO_CALLBACK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_INFO_CALLBACK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_keylog_callback.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_keylog_callback.3
index 1b8b17e2f61e3..eec135db9e6ed 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_keylog_callback.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_keylog_callback.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_KEYLOG_CALLBACK 3"
-.TH SSL_CTX_SET_KEYLOG_CALLBACK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_KEYLOG_CALLBACK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_max_cert_list.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_max_cert_list.3
index 60b97757e99c9..a8aad45da3f40 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_max_cert_list.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_max_cert_list.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_MAX_CERT_LIST 3"
-.TH SSL_CTX_SET_MAX_CERT_LIST 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_MAX_CERT_LIST 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_min_proto_version.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_min_proto_version.3
index 5efc3acf76dd7..5c466f1e96551 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_min_proto_version.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_min_proto_version.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_MIN_PROTO_VERSION 3"
-.TH SSL_CTX_SET_MIN_PROTO_VERSION 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_MIN_PROTO_VERSION 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_mode.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_mode.3
index 0f0ceaeeaf25c..9acf3a8faa6d4 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_mode.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_mode.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_MODE 3"
-.TH SSL_CTX_SET_MODE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_MODE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_msg_callback.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_msg_callback.3
index 1270177030a7b..e9af7cbdf33de 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_msg_callback.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_msg_callback.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_MSG_CALLBACK 3"
-.TH SSL_CTX_SET_MSG_CALLBACK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_MSG_CALLBACK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_num_tickets.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_num_tickets.3
index 153474a35e2c8..95af85135b117 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_num_tickets.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_num_tickets.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_NUM_TICKETS 3"
-.TH SSL_CTX_SET_NUM_TICKETS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_NUM_TICKETS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_options.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_options.3
index 305cad4f518e5..5595eee28357c 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_options.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_options.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_OPTIONS 3"
-.TH SSL_CTX_SET_OPTIONS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_OPTIONS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_psk_client_callback.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_psk_client_callback.3
index 3caa1bbb92f50..a64f08be153d5 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_psk_client_callback.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_psk_client_callback.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_PSK_CLIENT_CALLBACK 3"
-.TH SSL_CTX_SET_PSK_CLIENT_CALLBACK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_PSK_CLIENT_CALLBACK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_quiet_shutdown.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_quiet_shutdown.3
index 8fd992798ca32..b667e4a9b8f78 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_quiet_shutdown.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_quiet_shutdown.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_QUIET_SHUTDOWN 3"
-.TH SSL_CTX_SET_QUIET_SHUTDOWN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_QUIET_SHUTDOWN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_read_ahead.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_read_ahead.3
index 1607028aa487f..46c5fb5d21eeb 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_read_ahead.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_read_ahead.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_READ_AHEAD 3"
-.TH SSL_CTX_SET_READ_AHEAD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_READ_AHEAD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_record_padding_callback.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_record_padding_callback.3
index 35156c891bb6f..a5bac422e45a3 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_record_padding_callback.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_record_padding_callback.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_RECORD_PADDING_CALLBACK 3"
-.TH SSL_CTX_SET_RECORD_PADDING_CALLBACK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_RECORD_PADDING_CALLBACK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_security_level.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_security_level.3
index 63202d7e4d597..c7d63181adec5 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_security_level.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_security_level.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_SECURITY_LEVEL 3"
-.TH SSL_CTX_SET_SECURITY_LEVEL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_SECURITY_LEVEL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_cache_mode.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_cache_mode.3
index 1b11b9a1859e6..ecedd4d5b421c 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_cache_mode.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_cache_mode.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_SESSION_CACHE_MODE 3"
-.TH SSL_CTX_SET_SESSION_CACHE_MODE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_SESSION_CACHE_MODE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_id_context.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_id_context.3
index aa394e1330595..781b03c0d0cfb 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_id_context.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_id_context.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_SESSION_ID_CONTEXT 3"
-.TH SSL_CTX_SET_SESSION_ID_CONTEXT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_SESSION_ID_CONTEXT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_ticket_cb.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_ticket_cb.3
index d71e9c1375345..e113ef6f4b6d1 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_ticket_cb.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_session_ticket_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_SESSION_TICKET_CB 3"
-.TH SSL_CTX_SET_SESSION_TICKET_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_SESSION_TICKET_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_split_send_fragment.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_split_send_fragment.3
index b44ace46f3251..6ff6875ebc643 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_split_send_fragment.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_split_send_fragment.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_SPLIT_SEND_FRAGMENT 3"
-.TH SSL_CTX_SET_SPLIT_SEND_FRAGMENT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_SPLIT_SEND_FRAGMENT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_ssl_version.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_ssl_version.3
index 44b993569654a..56c5c5432e019 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_ssl_version.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_ssl_version.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_SSL_VERSION 3"
-.TH SSL_CTX_SET_SSL_VERSION 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_SSL_VERSION 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_stateless_cookie_generate_cb.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_stateless_cookie_generate_cb.3
index 24e74e8f4ee53..3c7cfb6555eda 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_stateless_cookie_generate_cb.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_stateless_cookie_generate_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,13 +133,13 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_STATELESS_COOKIE_GENERATE_CB 3"
-.TH SSL_CTX_SET_STATELESS_COOKIE_GENERATE_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_STATELESS_COOKIE_GENERATE_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-SSL_CTX_set_stateless_cookie_generate_cb, SSL_CTX_set_stateless_cookie_verify_cb \&\- Callback functions for stateless TLS1.3 cookies
+SSL_CTX_set_stateless_cookie_generate_cb, SSL_CTX_set_stateless_cookie_verify_cb, SSL_CTX_set_cookie_generate_cb, SSL_CTX_set_cookie_verify_cb \&\- Callback functions for stateless TLS1.3 cookies
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
@@ -155,29 +155,63 @@ SSL_CTX_set_stateless_cookie_generate_cb, SSL_CTX_set_stateless_cookie_verify_cb
\& int (*verify_stateless_cookie_cb) (SSL *ssl,
\& const unsigned char *cookie,
\& size_t cookie_len));
+\&
+\& void SSL_CTX_set_cookie_generate_cb(SSL_CTX *ctx,
+\& int (*app_gen_cookie_cb) (SSL *ssl,
+\& unsigned char
+\& *cookie,
+\& unsigned int
+\& *cookie_len));
+\& void SSL_CTX_set_cookie_verify_cb(SSL_CTX *ctx,
+\& int (*app_verify_cookie_cb) (SSL *ssl,
+\& const unsigned
+\& char *cookie,
+\& unsigned int
+\& cookie_len));
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
-\&\fBSSL_CTX_set_cookie_generate_cb()\fR sets the callback used by \fBSSL_stateless\fR\|(3)
-to generate the application-controlled portion of the cookie provided to clients
-in the HelloRetryRequest transmitted as a response to a ClientHello with a
-missing or invalid cookie. \fBgen_stateless_cookie_cb()\fR must write at most
-\&\s-1SSL_COOKIE_LENGTH\s0 bytes into \fBcookie\fR, and must write the number of bytes
-written to \fBcookie_len\fR. If a cookie cannot be generated, a zero return value
-can be used to abort the handshake.
+\&\fBSSL_CTX_set_stateless_cookie_generate_cb()\fR sets the callback used by
+\&\fBSSL_stateless\fR\|(3) to generate the application-controlled portion of the cookie
+provided to clients in the HelloRetryRequest transmitted as a response to a
+ClientHello with a missing or invalid cookie. \fBgen_stateless_cookie_cb()\fR must
+write at most \s-1SSL_COOKIE_LENGTH\s0 bytes into \fBcookie\fR, and must write the number
+of bytes written to \fBcookie_len\fR. If a cookie cannot be generated, a zero
+return value can be used to abort the handshake.
+.PP
+\&\fBSSL_CTX_set_stateless_cookie_verify_cb()\fR sets the callback used by
+\&\fBSSL_stateless\fR\|(3) to determine whether the application-controlled portion of a
+ClientHello cookie is valid. The cookie data is pointed to by \fBcookie\fR and is of
+length \fBcookie_len\fR. A nonzero return value from \fBverify_stateless_cookie_cb()\fR
+communicates that the cookie is valid. The integrity of the entire cookie,
+including the application-controlled portion, is automatically verified by \s-1HMAC\s0
+before \fBverify_stateless_cookie_cb()\fR is called.
+.PP
+\&\fBSSL_CTX_set_cookie_generate_cb()\fR sets the callback used by \fBDTLSv1_listen\fR\|(3)
+to generate the cookie provided to clients in the HelloVerifyRequest transmitted
+as a response to a ClientHello with a missing or invalid cookie.
+\&\fBapp_gen_cookie_cb()\fR must write at most \s-1DTLS1_COOKIE_LENGTH\s0 bytes into
+\&\fBcookie\fR, and must write the number of bytes written to \fBcookie_len\fR. If a
+cookie cannot be generated, a zero return value can be used to abort the
+handshake.
.PP
-\&\fBSSL_CTX_set_cookie_verify_cb()\fR sets the callback used by \fBSSL_stateless\fR\|(3) to
-determine whether the application-controlled portion of a ClientHello cookie is
-valid. A nonzero return value from \fBapp_verify_cookie_cb()\fR communicates that the
-cookie is valid. The integrity of the entire cookie, including the
-application-controlled portion, is automatically verified by \s-1HMAC\s0 before
-\&\fBverify_stateless_cookie_cb()\fR is called.
+\&\fBSSL_CTX_set_cookie_verify_cb()\fR sets the callback used by \fBDTLSv1_listen\fR\|(3) to
+determine whether the cookie in a ClientHello is valid. The cookie data is
+pointed to by \fBcookie\fR and is of length \fBcookie_len\fR. A nonzero return value
+from \fBapp_verify_cookie_cb()\fR communicates that the cookie is valid. The
+integrity of the cookie is not verified by OpenSSL. This is an application
+responsibility.
.SH "RETURN VALUES"
.IX Header "RETURN VALUES"
Neither function returns a value.
.SH "SEE ALSO"
.IX Header "SEE ALSO"
-\&\fBSSL_stateless\fR\|(3)
+\&\fBSSL_stateless\fR\|(3),
+\&\fBDTLSv1_listen\fR\|(3)
+.SH "HISTORY"
+.IX Header "HISTORY"
+\&\fBSSL_CTX_set_stateless_cookie_generate_cb()\fR and
+\&\fBSSL_CTX_set_stateless_cookie_verify_cb()\fR were added in OpenSSL 1.1.1.
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
Copyright 2018 The OpenSSL Project Authors. All Rights Reserved.
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_timeout.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_timeout.3
index d570a55d44ef5..fc553f46d316b 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_timeout.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_timeout.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_TIMEOUT 3"
-.TH SSL_CTX_SET_TIMEOUT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_TIMEOUT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_servername_callback.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_servername_callback.3
index cd619b0f5a974..a95f0a2f9211f 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_servername_callback.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_servername_callback.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_TLSEXT_SERVERNAME_CALLBACK 3"
-.TH SSL_CTX_SET_TLSEXT_SERVERNAME_CALLBACK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_TLSEXT_SERVERNAME_CALLBACK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -146,7 +146,7 @@ SSL_CTX_set_tlsext_servername_callback, SSL_CTX_set_tlsext_servername_arg, SSL_g
\& #include <openssl/ssl.h>
\&
\& long SSL_CTX_set_tlsext_servername_callback(SSL_CTX *ctx,
-\& int (*cb)(SSL *, int *, void *));
+\& int (*cb)(SSL *s, int *al, void *arg));
\& long SSL_CTX_set_tlsext_servername_arg(SSL_CTX *ctx, void *arg);
\&
\& const char *SSL_get_servername(const SSL *s, const int type);
@@ -156,21 +156,84 @@ SSL_CTX_set_tlsext_servername_callback, SSL_CTX_set_tlsext_servername_arg, SSL_g
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
-The functionality provided by the servername callback is superseded by the
-ClientHello callback, which can be set using \fBSSL_CTX_set_client_hello_cb()\fR.
-The servername callback is retained for historical compatibility.
+The functionality provided by the servername callback is mostly superseded by
+the ClientHello callback, which can be set using \fBSSL_CTX_set_client_hello_cb()\fR.
+However, even where the ClientHello callback is used, the servername callback is
+still necessary in order to acknowledge the servername requested by the client.
.PP
\&\fBSSL_CTX_set_tlsext_servername_callback()\fR sets the application callback \fBcb\fR
used by a server to perform any actions or configuration required based on
the servername extension received in the incoming connection. When \fBcb\fR
-is \s-1NULL, SNI\s0 is not used. The \fBarg\fR value is a pointer which is passed to
-the application callback.
+is \s-1NULL, SNI\s0 is not used.
+.PP
+The servername callback should return one of the following values:
+.IP "\s-1SSL_TLSEXT_ERR_OK\s0" 4
+.IX Item "SSL_TLSEXT_ERR_OK"
+This is used to indicate that the servername requested by the client has been
+accepted. Typically a server will call \fBSSL_set_SSL_CTX()\fR in the callback to set
+up a different configuration for the selected servername in this case.
+.IP "\s-1SSL_TLSEXT_ERR_ALERT_FATAL\s0" 4
+.IX Item "SSL_TLSEXT_ERR_ALERT_FATAL"
+In this case the servername requested by the client is not accepted and the
+handshake will be aborted. The value of the alert to be used should be stored in
+the location pointed to by the \fBal\fR parameter to the callback. By default this
+value is initialised to \s-1SSL_AD_UNRECOGNIZED_NAME.\s0
+.IP "\s-1SSL_TLSEXT_ERR_ALERT_WARNING\s0" 4
+.IX Item "SSL_TLSEXT_ERR_ALERT_WARNING"
+If this value is returned then the servername is not accepted by the server.
+However the handshake will continue and send a warning alert instead. The value
+of the alert should be stored in the location pointed to by the \fBal\fR parameter
+as for \s-1SSL_TLSEXT_ERR_ALERT_FATAL\s0 above. Note that TLSv1.3 does not support
+warning alerts, so if TLSv1.3 has been negotiated then this return value is
+treated the same way as \s-1SSL_TLSEXT_ERR_NOACK.\s0
+.IP "\s-1SSL_TLSEXT_ERR_NOACK\s0" 4
+.IX Item "SSL_TLSEXT_ERR_NOACK"
+This return value indicates that the servername is not accepted by the server.
+No alerts are sent and the server will not acknowledge the requested servername.
.PP
\&\fBSSL_CTX_set_tlsext_servername_arg()\fR sets a context-specific argument to be
-passed into the callback for this \fB\s-1SSL_CTX\s0\fR.
+passed into the callback (via the \fBarg\fR parameter) for this \fB\s-1SSL_CTX\s0\fR.
+.PP
+The behaviour of \fBSSL_get_servername()\fR depends on a number of different factors.
+In particular note that in TLSv1.3 the servername is negotiated in every
+handshake. In TLSv1.2 the servername is only negotiated on initial handshakes
+and not on resumption handshakes.
+.IP "On the client, before the handshake" 4
+.IX Item "On the client, before the handshake"
+If a servername has been set via a call to \fBSSL_set_tlsext_host_name()\fR then it
+will return that servername.
+.Sp
+If one has not been set, but a TLSv1.2 resumption is being attempted and the
+session from the original handshake had a servername accepted by the server then
+it will return that servername.
+.Sp
+Otherwise it returns \s-1NULL.\s0
+.IP "On the client, during or after the handshake and a TLSv1.2 (or below) resumption occurred" 4
+.IX Item "On the client, during or after the handshake and a TLSv1.2 (or below) resumption occurred"
+If the session from the original handshake had a servername accepted by the
+server then it will return that servername.
+.Sp
+Otherwise it returns the servername set via \fBSSL_set_tlsext_host_name()\fR or \s-1NULL\s0
+if it was not called.
+.IP "On the client, during or after the handshake and a TLSv1.2 (or below) resumption did not occur" 4
+.IX Item "On the client, during or after the handshake and a TLSv1.2 (or below) resumption did not occur"
+It will return the servername set via \fBSSL_set_tlsext_host_name()\fR or \s-1NULL\s0 if it
+was not called.
+.IP "On the server, before the handshake" 4
+.IX Item "On the server, before the handshake"
+The function will always return \s-1NULL\s0 before the handshake.
+.IP "On the server, after the servername extension has been processed and a TLSv1.2 (or below) resumption occurred" 4
+.IX Item "On the server, after the servername extension has been processed and a TLSv1.2 (or below) resumption occurred"
+If a servername was accepted by the server in the original handshake then it
+will return that servername, or \s-1NULL\s0 otherwise.
+.IP "On the server, after the servername extension has been processed and a TLSv1.2 (or below) resumption did not occur" 4
+.IX Item "On the server, after the servername extension has been processed and a TLSv1.2 (or below) resumption did not occur"
+The function will return the servername requested by the client in this
+handshake or \s-1NULL\s0 if none was requested.
.PP
-\&\fBSSL_get_servername()\fR returns a servername extension value of the specified
-type if provided in the Client Hello or \s-1NULL.\s0
+Note that the ClientHello callback occurs before a servername extension from the
+client is processed. The servername, certificate and \s-1ALPN\s0 callbacks occur after
+a servername extension from the client is processed.
.PP
\&\fBSSL_get_servername_type()\fR returns the servername type or \-1 if no servername
is present. Currently the only supported type (defined in \s-1RFC3546\s0) is
@@ -196,9 +259,25 @@ that will act as clients; otherwise the configured \fBname\fR will be ignored.
.IX Header "SEE ALSO"
\&\fBssl\fR\|(7), \fBSSL_CTX_set_alpn_select_cb\fR\|(3),
\&\fBSSL_get0_alpn_selected\fR\|(3), \fBSSL_CTX_set_client_hello_cb\fR\|(3)
+.SH "HISTORY"
+.IX Header "HISTORY"
+\&\fBSSL_get_servername()\fR historically provided some unexpected results in certain
+corner cases. This has been fixed from OpenSSL 1.1.1e.
+.PP
+Prior to 1.1.1e, when the client requested a servername in an initial TLSv1.2
+handshake, the server accepted it, and then the client successfully resumed but
+set a different explicit servername in the second handshake then when called by
+the client it returned the servername from the second handshake. This has now
+been changed to return the servername requested in the original handshake.
+.PP
+Also prior to 1.1.1e, if the client sent a servername in the first handshake but
+the server did not accept it, and then a second handshake occurred where TLSv1.2
+resumption was successful then when called by the server it returned the
+servername requested in the original handshake. This has now been changed to
+\&\s-1NULL.\s0
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2017 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2017\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_status_cb.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_status_cb.3
index 25053e2f88fe1..894a9e7949db1 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_status_cb.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_status_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_TLSEXT_STATUS_CB 3"
-.TH SSL_CTX_SET_TLSEXT_STATUS_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_TLSEXT_STATUS_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_ticket_key_cb.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_ticket_key_cb.3
index d86500b0e4d7c..625d87a62580f 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_ticket_key_cb.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_ticket_key_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_TLSEXT_TICKET_KEY_CB 3"
-.TH SSL_CTX_SET_TLSEXT_TICKET_KEY_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_TLSEXT_TICKET_KEY_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_use_srtp.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_use_srtp.3
index a7c747c01289d..e39a5eb756518 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_use_srtp.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_tlsext_use_srtp.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_TLSEXT_USE_SRTP 3"
-.TH SSL_CTX_SET_TLSEXT_USE_SRTP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_TLSEXT_USE_SRTP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_tmp_dh_callback.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_tmp_dh_callback.3
index ef6c569a920fc..e53f01ecc3551 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_tmp_dh_callback.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_tmp_dh_callback.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_TMP_DH_CALLBACK 3"
-.TH SSL_CTX_SET_TMP_DH_CALLBACK 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_TMP_DH_CALLBACK 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_set_verify.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_set_verify.3
index 30a09f7748731..168fffb4aa115 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_set_verify.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_set_verify.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_SET_VERIFY 3"
-.TH SSL_CTX_SET_VERIFY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_SET_VERIFY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_use_certificate.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_use_certificate.3
index 2adc7071de396..8c79f52225e82 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_use_certificate.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_use_certificate.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_USE_CERTIFICATE 3"
-.TH SSL_CTX_USE_CERTIFICATE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_USE_CERTIFICATE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_use_psk_identity_hint.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_use_psk_identity_hint.3
index eecfefa5774f5..d344e2c586748 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_use_psk_identity_hint.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_use_psk_identity_hint.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_USE_PSK_IDENTITY_HINT 3"
-.TH SSL_CTX_USE_PSK_IDENTITY_HINT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_USE_PSK_IDENTITY_HINT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -168,9 +168,9 @@ SSL_psk_server_cb_func, SSL_psk_find_session_cb_func, SSL_CTX_use_psk_identity_h
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
-A client application wishing to use TLSv1.3 PSKs should set a callback
-using either \fBSSL_CTX_set_psk_use_session_callback()\fR or
-\&\fBSSL_set_psk_use_session_callback()\fR as appropriate.
+A server application wishing to use TLSv1.3 PSKs should set a callback
+using either \fBSSL_CTX_set_psk_find_session_callback()\fR or
+\&\fBSSL_set_psk_find_session_callback()\fR as appropriate.
.PP
The callback function is given a pointer to the \s-1SSL\s0 connection in \fBssl\fR and
an identity in \fBidentity\fR of length \fBidentity_len\fR. The callback function
diff --git a/secure/lib/libcrypto/man/man3/SSL_CTX_use_serverinfo.3 b/secure/lib/libcrypto/man/man3/SSL_CTX_use_serverinfo.3
index 6493233ed82f6..71746002ccb50 100644
--- a/secure/lib/libcrypto/man/man3/SSL_CTX_use_serverinfo.3
+++ b/secure/lib/libcrypto/man/man3/SSL_CTX_use_serverinfo.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CTX_USE_SERVERINFO 3"
-.TH SSL_CTX_USE_SERVERINFO 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CTX_USE_SERVERINFO 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_free.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_free.3
index 212ba5c9e78ee..7670a8f18163e 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_free.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_free.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_FREE 3"
-.TH SSL_SESSION_FREE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_FREE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_cipher.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_cipher.3
index 021eb8a99f040..5c0c7201d83e2 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_cipher.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_cipher.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_GET0_CIPHER 3"
-.TH SSL_SESSION_GET0_CIPHER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_GET0_CIPHER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_hostname.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_hostname.3
index 81f3cf9f0adf3..060a5fec50306 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_hostname.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_hostname.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_GET0_HOSTNAME 3"
-.TH SSL_SESSION_GET0_HOSTNAME 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_GET0_HOSTNAME 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -157,7 +157,10 @@ SSL_SESSION_get0_hostname, SSL_SESSION_set1_hostname, SSL_SESSION_get0_alpn_sele
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
\&\fBSSL_SESSION_get0_hostname()\fR retrieves the \s-1SNI\s0 value that was sent by the
-client when the session was created, or \s-1NULL\s0 if no value was sent.
+client when the session was created if it was accepted by the server and TLSv1.2
+or below was negotiated. Otherwise \s-1NULL\s0 is returned. Note that in TLSv1.3 the
+\&\s-1SNI\s0 hostname is negotiated with each handshake including resumption handshakes
+and is therefore never associated with the session.
.PP
The value returned is a pointer to memory maintained within \fBs\fR and
should not be free'd.
@@ -193,7 +196,7 @@ The \fBSSL_SESSION_set1_hostname()\fR, \fBSSL_SESSION_get0_alpn_selected()\fR an
\&\fBSSL_SESSION_set1_alpn_selected()\fR functions were added in OpenSSL 1.1.1.
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2016\-2019 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2016\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_id_context.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_id_context.3
index 08e6b1e1a9a66..14da74384bc10 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_id_context.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_id_context.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_GET0_ID_CONTEXT 3"
-.TH SSL_SESSION_GET0_ID_CONTEXT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_GET0_ID_CONTEXT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_peer.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_peer.3
index 10492f60d8ef0..4f8e4246e790f 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_peer.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_get0_peer.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_GET0_PEER 3"
-.TH SSL_SESSION_GET0_PEER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_GET0_PEER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_get_compress_id.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_get_compress_id.3
index 7cfcd71fa3e0a..e707b246a98ec 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_get_compress_id.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_get_compress_id.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_GET_COMPRESS_ID 3"
-.TH SSL_SESSION_GET_COMPRESS_ID 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_GET_COMPRESS_ID 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_get_ex_data.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_get_ex_data.3
index b38fe75ba97ec..d8a744a308d2c 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_get_ex_data.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_get_ex_data.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_GET_EX_DATA 3"
-.TH SSL_SESSION_GET_EX_DATA 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_GET_EX_DATA 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_get_protocol_version.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_get_protocol_version.3
index d4affa087015d..ccdda99e07e35 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_get_protocol_version.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_get_protocol_version.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_GET_PROTOCOL_VERSION 3"
-.TH SSL_SESSION_GET_PROTOCOL_VERSION 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_GET_PROTOCOL_VERSION 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_get_time.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_get_time.3
index 52234cc258f7c..15723df27a73b 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_get_time.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_get_time.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_GET_TIME 3"
-.TH SSL_SESSION_GET_TIME 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_GET_TIME 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_has_ticket.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_has_ticket.3
index 383cd2cdc568a..3e3483081ef65 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_has_ticket.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_has_ticket.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_HAS_TICKET 3"
-.TH SSL_SESSION_HAS_TICKET 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_HAS_TICKET 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_is_resumable.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_is_resumable.3
index 59bcb792ce238..89f18e69ff0de 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_is_resumable.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_is_resumable.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_IS_RESUMABLE 3"
-.TH SSL_SESSION_IS_RESUMABLE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_IS_RESUMABLE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_print.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_print.3
index ce1dc38f3083b..b807f526f0d43 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_print.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_print.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_PRINT 3"
-.TH SSL_SESSION_PRINT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_PRINT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_SESSION_set1_id.3 b/secure/lib/libcrypto/man/man3/SSL_SESSION_set1_id.3
index dd1f1f758e74a..0104028c2e4a9 100644
--- a/secure/lib/libcrypto/man/man3/SSL_SESSION_set1_id.3
+++ b/secure/lib/libcrypto/man/man3/SSL_SESSION_set1_id.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_SET1_ID 3"
-.TH SSL_SESSION_SET1_ID 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_SET1_ID 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_accept.3 b/secure/lib/libcrypto/man/man3/SSL_accept.3
index bd49f996741bd..9045980fb613a 100644
--- a/secure/lib/libcrypto/man/man3/SSL_accept.3
+++ b/secure/lib/libcrypto/man/man3/SSL_accept.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_ACCEPT 3"
-.TH SSL_ACCEPT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_ACCEPT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -185,7 +185,7 @@ established.
.IX Item "<0"
The \s-1TLS/SSL\s0 handshake was not successful because a fatal error occurred either
at the protocol level or a connection failure occurred. The shutdown was
-not clean. It can also occur of action is need to continue the operation
+not clean. It can also occur if action is needed to continue the operation
for non-blocking BIOs. Call \fBSSL_get_error()\fR with the return value \fBret\fR
to find out the reason.
.SH "SEE ALSO"
@@ -197,7 +197,7 @@ to find out the reason.
\&\fBSSL_CTX_new\fR\|(3)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2000\-2016 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2000\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/SSL_alert_type_string.3 b/secure/lib/libcrypto/man/man3/SSL_alert_type_string.3
index cfb15a0b84ed3..cc268a2c32314 100644
--- a/secure/lib/libcrypto/man/man3/SSL_alert_type_string.3
+++ b/secure/lib/libcrypto/man/man3/SSL_alert_type_string.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_ALERT_TYPE_STRING 3"
-.TH SSL_ALERT_TYPE_STRING 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_ALERT_TYPE_STRING 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_alloc_buffers.3 b/secure/lib/libcrypto/man/man3/SSL_alloc_buffers.3
index 7846926ec896f..48c6040ac8213 100644
--- a/secure/lib/libcrypto/man/man3/SSL_alloc_buffers.3
+++ b/secure/lib/libcrypto/man/man3/SSL_alloc_buffers.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_ALLOC_BUFFERS 3"
-.TH SSL_ALLOC_BUFFERS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_ALLOC_BUFFERS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_check_chain.3 b/secure/lib/libcrypto/man/man3/SSL_check_chain.3
index 96fa735735d41..dc1f00a1430e8 100644
--- a/secure/lib/libcrypto/man/man3/SSL_check_chain.3
+++ b/secure/lib/libcrypto/man/man3/SSL_check_chain.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CHECK_CHAIN 3"
-.TH SSL_CHECK_CHAIN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CHECK_CHAIN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_clear.3 b/secure/lib/libcrypto/man/man3/SSL_clear.3
index 50dd9bf0b090f..2a723ca5950c9 100644
--- a/secure/lib/libcrypto/man/man3/SSL_clear.3
+++ b/secure/lib/libcrypto/man/man3/SSL_clear.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CLEAR 3"
-.TH SSL_CLEAR 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CLEAR 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_connect.3 b/secure/lib/libcrypto/man/man3/SSL_connect.3
index 3cf6cb35c962a..47a9a585bf0dd 100644
--- a/secure/lib/libcrypto/man/man3/SSL_connect.3
+++ b/secure/lib/libcrypto/man/man3/SSL_connect.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_CONNECT 3"
-.TH SSL_CONNECT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_CONNECT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -200,7 +200,7 @@ established.
.IX Item "<0"
The \s-1TLS/SSL\s0 handshake was not successful, because a fatal error occurred either
at the protocol level or a connection failure occurred. The shutdown was
-not clean. It can also occur of action is need to continue the operation
+not clean. It can also occur if action is needed to continue the operation
for non-blocking BIOs. Call \fBSSL_get_error()\fR with the return value \fBret\fR
to find out the reason.
.SH "SEE ALSO"
@@ -212,7 +212,7 @@ to find out the reason.
\&\fBSSL_CTX_new\fR\|(3)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2000\-2018 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2000\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/SSL_do_handshake.3 b/secure/lib/libcrypto/man/man3/SSL_do_handshake.3
index 7b1bc59ab3ddb..378dd5342912b 100644
--- a/secure/lib/libcrypto/man/man3/SSL_do_handshake.3
+++ b/secure/lib/libcrypto/man/man3/SSL_do_handshake.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_DO_HANDSHAKE 3"
-.TH SSL_DO_HANDSHAKE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_DO_HANDSHAKE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -186,7 +186,7 @@ established.
.IX Item "<0"
The \s-1TLS/SSL\s0 handshake was not successful because a fatal error occurred either
at the protocol level or a connection failure occurred. The shutdown was
-not clean. It can also occur of action is need to continue the operation
+not clean. It can also occur if action is needed to continue the operation
for non-blocking BIOs. Call \fBSSL_get_error()\fR with the return value \fBret\fR
to find out the reason.
.SH "SEE ALSO"
@@ -196,7 +196,7 @@ to find out the reason.
\&\fBSSL_set_connect_state\fR\|(3)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2002\-2016 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2002\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/SSL_export_keying_material.3 b/secure/lib/libcrypto/man/man3/SSL_export_keying_material.3
index fa4cdfdb179e9..a7d0aad140aaf 100644
--- a/secure/lib/libcrypto/man/man3/SSL_export_keying_material.3
+++ b/secure/lib/libcrypto/man/man3/SSL_export_keying_material.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_EXPORT_KEYING_MATERIAL 3"
-.TH SSL_EXPORT_KEYING_MATERIAL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_EXPORT_KEYING_MATERIAL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_extension_supported.3 b/secure/lib/libcrypto/man/man3/SSL_extension_supported.3
index 34691c5a330ae..ef7cef924616c 100644
--- a/secure/lib/libcrypto/man/man3/SSL_extension_supported.3
+++ b/secure/lib/libcrypto/man/man3/SSL_extension_supported.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_EXTENSION_SUPPORTED 3"
-.TH SSL_EXTENSION_SUPPORTED 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_EXTENSION_SUPPORTED 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_free.3 b/secure/lib/libcrypto/man/man3/SSL_free.3
index ed1596765f981..e60545face094 100644
--- a/secure/lib/libcrypto/man/man3/SSL_free.3
+++ b/secure/lib/libcrypto/man/man3/SSL_free.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_FREE 3"
-.TH SSL_FREE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_FREE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get0_peer_scts.3 b/secure/lib/libcrypto/man/man3/SSL_get0_peer_scts.3
index 8797c4e1168b0..a96e74d85b880 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get0_peer_scts.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get0_peer_scts.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET0_PEER_SCTS 3"
-.TH SSL_GET0_PEER_SCTS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET0_PEER_SCTS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_SSL_CTX.3 b/secure/lib/libcrypto/man/man3/SSL_get_SSL_CTX.3
index 3da243622bb9a..a44ade981a213 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_SSL_CTX.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_SSL_CTX.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_SSL_CTX 3"
-.TH SSL_GET_SSL_CTX 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_SSL_CTX 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_all_async_fds.3 b/secure/lib/libcrypto/man/man3/SSL_get_all_async_fds.3
index 29f672ad1d1fc..6353b76e67357 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_all_async_fds.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_all_async_fds.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_ALL_ASYNC_FDS 3"
-.TH SSL_GET_ALL_ASYNC_FDS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_ALL_ASYNC_FDS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_ciphers.3 b/secure/lib/libcrypto/man/man3/SSL_get_ciphers.3
index 3975bcf0b93b5..34de235b533cf 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_ciphers.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_ciphers.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_CIPHERS 3"
-.TH SSL_GET_CIPHERS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_CIPHERS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_client_random.3 b/secure/lib/libcrypto/man/man3/SSL_get_client_random.3
index 87717bcc0dfa5..f45977ee0f70c 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_client_random.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_client_random.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_CLIENT_RANDOM 3"
-.TH SSL_GET_CLIENT_RANDOM 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_CLIENT_RANDOM 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_current_cipher.3 b/secure/lib/libcrypto/man/man3/SSL_get_current_cipher.3
index c91ba56a04c5d..5047c2660dc41 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_current_cipher.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_current_cipher.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_CURRENT_CIPHER 3"
-.TH SSL_GET_CURRENT_CIPHER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_CURRENT_CIPHER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_default_timeout.3 b/secure/lib/libcrypto/man/man3/SSL_get_default_timeout.3
index 1949f4f60289c..80ae2626f1bbe 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_default_timeout.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_default_timeout.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_DEFAULT_TIMEOUT 3"
-.TH SSL_GET_DEFAULT_TIMEOUT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_DEFAULT_TIMEOUT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_error.3 b/secure/lib/libcrypto/man/man3/SSL_get_error.3
index 981e5a55d23a4..79158a6806601 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_error.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_error.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_ERROR 3"
-.TH SSL_GET_ERROR 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_ERROR 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_extms_support.3 b/secure/lib/libcrypto/man/man3/SSL_get_extms_support.3
index e35972e489cf4..bc6b43e23c4cf 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_extms_support.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_extms_support.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_EXTMS_SUPPORT 3"
-.TH SSL_GET_EXTMS_SUPPORT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_EXTMS_SUPPORT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_fd.3 b/secure/lib/libcrypto/man/man3/SSL_get_fd.3
index 1dcd5bab8d2d9..06078f2ad1758 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_fd.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_fd.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_FD 3"
-.TH SSL_GET_FD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_FD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_peer_cert_chain.3 b/secure/lib/libcrypto/man/man3/SSL_get_peer_cert_chain.3
index 4f8fe4632a944..6365a2dd68545 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_peer_cert_chain.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_peer_cert_chain.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_PEER_CERT_CHAIN 3"
-.TH SSL_GET_PEER_CERT_CHAIN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_PEER_CERT_CHAIN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_peer_certificate.3 b/secure/lib/libcrypto/man/man3/SSL_get_peer_certificate.3
index 01eae91d8f679..fda43afb50ea8 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_peer_certificate.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_peer_certificate.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_PEER_CERTIFICATE 3"
-.TH SSL_GET_PEER_CERTIFICATE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_PEER_CERTIFICATE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_peer_signature_nid.3 b/secure/lib/libcrypto/man/man3/SSL_get_peer_signature_nid.3
index a9c1cde0a6af4..700d5d0e8ea77 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_peer_signature_nid.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_peer_signature_nid.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_PEER_SIGNATURE_NID 3"
-.TH SSL_GET_PEER_SIGNATURE_NID 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_PEER_SIGNATURE_NID 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_peer_tmp_key.3 b/secure/lib/libcrypto/man/man3/SSL_get_peer_tmp_key.3
index fc37dd06cf840..06b271785dd1b 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_peer_tmp_key.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_peer_tmp_key.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_PEER_TMP_KEY 3"
-.TH SSL_GET_PEER_TMP_KEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_PEER_TMP_KEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_psk_identity.3 b/secure/lib/libcrypto/man/man3/SSL_get_psk_identity.3
index 3b61985d2d008..c315eec77f93e 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_psk_identity.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_psk_identity.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_PSK_IDENTITY 3"
-.TH SSL_GET_PSK_IDENTITY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_PSK_IDENTITY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_rbio.3 b/secure/lib/libcrypto/man/man3/SSL_get_rbio.3
index 0347858466532..d1807785b9194 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_rbio.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_rbio.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_RBIO 3"
-.TH SSL_GET_RBIO 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_RBIO 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_session.3 b/secure/lib/libcrypto/man/man3/SSL_get_session.3
index edf036fc0ab09..fa23ba99cbc91 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_session.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_session.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_SESSION 3"
-.TH SSL_GET_SESSION 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_SESSION 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_shared_sigalgs.3 b/secure/lib/libcrypto/man/man3/SSL_get_shared_sigalgs.3
index c4084ee1c21be..535dbb7d65324 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_shared_sigalgs.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_shared_sigalgs.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_SHARED_SIGALGS 3"
-.TH SSL_GET_SHARED_SIGALGS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_SHARED_SIGALGS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_verify_result.3 b/secure/lib/libcrypto/man/man3/SSL_get_verify_result.3
index 3231f4f9380c2..f3a1bd645f387 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_verify_result.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_verify_result.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_VERIFY_RESULT 3"
-.TH SSL_GET_VERIFY_RESULT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_VERIFY_RESULT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_get_version.3 b/secure/lib/libcrypto/man/man3/SSL_get_version.3
index 00c4800b61cbf..01119ceb3fc66 100644
--- a/secure/lib/libcrypto/man/man3/SSL_get_version.3
+++ b/secure/lib/libcrypto/man/man3/SSL_get_version.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_GET_VERSION 3"
-.TH SSL_GET_VERSION 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_GET_VERSION 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_in_init.3 b/secure/lib/libcrypto/man/man3/SSL_in_init.3
index 5c22629c168fe..5f5fb73f7cdc2 100644
--- a/secure/lib/libcrypto/man/man3/SSL_in_init.3
+++ b/secure/lib/libcrypto/man/man3/SSL_in_init.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_IN_INIT 3"
-.TH SSL_IN_INIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_IN_INIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_key_update.3 b/secure/lib/libcrypto/man/man3/SSL_key_update.3
index b69af2cdad35c..6a70f093476db 100644
--- a/secure/lib/libcrypto/man/man3/SSL_key_update.3
+++ b/secure/lib/libcrypto/man/man3/SSL_key_update.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_KEY_UPDATE 3"
-.TH SSL_KEY_UPDATE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_KEY_UPDATE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_library_init.3 b/secure/lib/libcrypto/man/man3/SSL_library_init.3
index fc2c884623a91..30ec4d694e7b4 100644
--- a/secure/lib/libcrypto/man/man3/SSL_library_init.3
+++ b/secure/lib/libcrypto/man/man3/SSL_library_init.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_LIBRARY_INIT 3"
-.TH SSL_LIBRARY_INIT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_LIBRARY_INIT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_load_client_CA_file.3 b/secure/lib/libcrypto/man/man3/SSL_load_client_CA_file.3
index 3d39648af7fcb..562116b9277cf 100644
--- a/secure/lib/libcrypto/man/man3/SSL_load_client_CA_file.3
+++ b/secure/lib/libcrypto/man/man3/SSL_load_client_CA_file.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,24 +133,36 @@
.\" ========================================================================
.\"
.IX Title "SSL_LOAD_CLIENT_CA_FILE 3"
-.TH SSL_LOAD_CLIENT_CA_FILE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_LOAD_CLIENT_CA_FILE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-SSL_load_client_CA_file \- load certificate names from file
+SSL_load_client_CA_file, SSL_add_file_cert_subjects_to_stack, SSL_add_dir_cert_subjects_to_stack \&\- load certificate names
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
\& #include <openssl/ssl.h>
\&
\& STACK_OF(X509_NAME) *SSL_load_client_CA_file(const char *file);
+\&
+\& int SSL_add_file_cert_subjects_to_stack(STACK_OF(X509_NAME) *stack,
+\& const char *file)
+\& int SSL_add_dir_cert_subjects_to_stack(STACK_OF(X509_NAME) *stack,
+\& const char *dir)
.Ve
.SH "DESCRIPTION"
.IX Header "DESCRIPTION"
-\&\fBSSL_load_client_CA_file()\fR reads certificates from \fBfile\fR and returns
+\&\fBSSL_load_client_CA_file()\fR reads certificates from \fIfile\fR and returns
a \s-1STACK_OF\s0(X509_NAME) with the subject names found.
+.PP
+\&\fBSSL_add_file_cert_subjects_to_stack()\fR reads certificates from \fIfile\fR,
+and adds their subject name to the already existing \fIstack\fR.
+.PP
+\&\fBSSL_add_dir_cert_subjects_to_stack()\fR reads certificates from every
+file in the directory \fIdir\fR, and adds their subject name to the
+already existing \fIstack\fR.
.SH "NOTES"
.IX Header "NOTES"
\&\fBSSL_load_client_CA_file()\fR reads a file of \s-1PEM\s0 formatted certificates and
diff --git a/secure/lib/libcrypto/man/man3/SSL_new.3 b/secure/lib/libcrypto/man/man3/SSL_new.3
index 4595a101f1213..bc33924e7f2cd 100644
--- a/secure/lib/libcrypto/man/man3/SSL_new.3
+++ b/secure/lib/libcrypto/man/man3/SSL_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_NEW 3"
-.TH SSL_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_pending.3 b/secure/lib/libcrypto/man/man3/SSL_pending.3
index 7ef93680808e2..dfd37c8a4ea31 100644
--- a/secure/lib/libcrypto/man/man3/SSL_pending.3
+++ b/secure/lib/libcrypto/man/man3/SSL_pending.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_PENDING 3"
-.TH SSL_PENDING 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_PENDING 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_read.3 b/secure/lib/libcrypto/man/man3/SSL_read.3
index 0acfe6574ccb5..6584df61e23ba 100644
--- a/secure/lib/libcrypto/man/man3/SSL_read.3
+++ b/secure/lib/libcrypto/man/man3/SSL_read.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_READ 3"
-.TH SSL_READ 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_READ 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_read_early_data.3 b/secure/lib/libcrypto/man/man3/SSL_read_early_data.3
index d2c487130cb18..bd525d42e66cf 100644
--- a/secure/lib/libcrypto/man/man3/SSL_read_early_data.3
+++ b/secure/lib/libcrypto/man/man3/SSL_read_early_data.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_READ_EARLY_DATA 3"
-.TH SSL_READ_EARLY_DATA 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_READ_EARLY_DATA 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_rstate_string.3 b/secure/lib/libcrypto/man/man3/SSL_rstate_string.3
index 3696f4f4e3532..596ed0b78bc71 100644
--- a/secure/lib/libcrypto/man/man3/SSL_rstate_string.3
+++ b/secure/lib/libcrypto/man/man3/SSL_rstate_string.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_RSTATE_STRING 3"
-.TH SSL_RSTATE_STRING 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_RSTATE_STRING 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_session_reused.3 b/secure/lib/libcrypto/man/man3/SSL_session_reused.3
index 9a7bb5f6bac6f..03aa9e398deaf 100644
--- a/secure/lib/libcrypto/man/man3/SSL_session_reused.3
+++ b/secure/lib/libcrypto/man/man3/SSL_session_reused.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SESSION_REUSED 3"
-.TH SSL_SESSION_REUSED 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SESSION_REUSED 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_set1_host.3 b/secure/lib/libcrypto/man/man3/SSL_set1_host.3
index 01621f4679c0c..5f0271b200693 100644
--- a/secure/lib/libcrypto/man/man3/SSL_set1_host.3
+++ b/secure/lib/libcrypto/man/man3/SSL_set1_host.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SET1_HOST 3"
-.TH SSL_SET1_HOST 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SET1_HOST 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_set_bio.3 b/secure/lib/libcrypto/man/man3/SSL_set_bio.3
index df9bba45a8711..0f39f63d7f7f9 100644
--- a/secure/lib/libcrypto/man/man3/SSL_set_bio.3
+++ b/secure/lib/libcrypto/man/man3/SSL_set_bio.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SET_BIO 3"
-.TH SSL_SET_BIO 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SET_BIO 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_set_connect_state.3 b/secure/lib/libcrypto/man/man3/SSL_set_connect_state.3
index 850c360e2632a..9a3d6935c5f64 100644
--- a/secure/lib/libcrypto/man/man3/SSL_set_connect_state.3
+++ b/secure/lib/libcrypto/man/man3/SSL_set_connect_state.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SET_CONNECT_STATE 3"
-.TH SSL_SET_CONNECT_STATE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SET_CONNECT_STATE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_set_fd.3 b/secure/lib/libcrypto/man/man3/SSL_set_fd.3
index bbab70ce8a9f5..44f388d17e23f 100644
--- a/secure/lib/libcrypto/man/man3/SSL_set_fd.3
+++ b/secure/lib/libcrypto/man/man3/SSL_set_fd.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SET_FD 3"
-.TH SSL_SET_FD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SET_FD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_set_session.3 b/secure/lib/libcrypto/man/man3/SSL_set_session.3
index 0d0533c866ee5..391eadd03c2d0 100644
--- a/secure/lib/libcrypto/man/man3/SSL_set_session.3
+++ b/secure/lib/libcrypto/man/man3/SSL_set_session.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SET_SESSION 3"
-.TH SSL_SET_SESSION 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SET_SESSION 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_set_shutdown.3 b/secure/lib/libcrypto/man/man3/SSL_set_shutdown.3
index e76e2b68bdab6..9e3fa0aef1344 100644
--- a/secure/lib/libcrypto/man/man3/SSL_set_shutdown.3
+++ b/secure/lib/libcrypto/man/man3/SSL_set_shutdown.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SET_SHUTDOWN 3"
-.TH SSL_SET_SHUTDOWN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SET_SHUTDOWN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_set_verify_result.3 b/secure/lib/libcrypto/man/man3/SSL_set_verify_result.3
index 09552f2f8db64..3da55bf24b28b 100644
--- a/secure/lib/libcrypto/man/man3/SSL_set_verify_result.3
+++ b/secure/lib/libcrypto/man/man3/SSL_set_verify_result.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SET_VERIFY_RESULT 3"
-.TH SSL_SET_VERIFY_RESULT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SET_VERIFY_RESULT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_shutdown.3 b/secure/lib/libcrypto/man/man3/SSL_shutdown.3
index f74ea8d095b83..32cb7df6dcaab 100644
--- a/secure/lib/libcrypto/man/man3/SSL_shutdown.3
+++ b/secure/lib/libcrypto/man/man3/SSL_shutdown.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_SHUTDOWN 3"
-.TH SSL_SHUTDOWN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_SHUTDOWN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_state_string.3 b/secure/lib/libcrypto/man/man3/SSL_state_string.3
index a331aa5443461..4f2d7191a95b4 100644
--- a/secure/lib/libcrypto/man/man3/SSL_state_string.3
+++ b/secure/lib/libcrypto/man/man3/SSL_state_string.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_STATE_STRING 3"
-.TH SSL_STATE_STRING 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_STATE_STRING 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_want.3 b/secure/lib/libcrypto/man/man3/SSL_want.3
index 6a801319b9589..7bb9b0b8413d6 100644
--- a/secure/lib/libcrypto/man/man3/SSL_want.3
+++ b/secure/lib/libcrypto/man/man3/SSL_want.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_WANT 3"
-.TH SSL_WANT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_WANT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/SSL_write.3 b/secure/lib/libcrypto/man/man3/SSL_write.3
index 763769e638279..09fcc1c1b5569 100644
--- a/secure/lib/libcrypto/man/man3/SSL_write.3
+++ b/secure/lib/libcrypto/man/man3/SSL_write.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL_WRITE 3"
-.TH SSL_WRITE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL_WRITE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/UI_STRING.3 b/secure/lib/libcrypto/man/man3/UI_STRING.3
index 61109cec98bb1..cef42f8cc3d02 100644
--- a/secure/lib/libcrypto/man/man3/UI_STRING.3
+++ b/secure/lib/libcrypto/man/man3/UI_STRING.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "UI_STRING 3"
-.TH UI_STRING 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH UI_STRING 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/UI_UTIL_read_pw.3 b/secure/lib/libcrypto/man/man3/UI_UTIL_read_pw.3
index 40591abccc8c9..0159e6ccef683 100644
--- a/secure/lib/libcrypto/man/man3/UI_UTIL_read_pw.3
+++ b/secure/lib/libcrypto/man/man3/UI_UTIL_read_pw.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "UI_UTIL_READ_PW 3"
-.TH UI_UTIL_READ_PW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH UI_UTIL_READ_PW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/UI_create_method.3 b/secure/lib/libcrypto/man/man3/UI_create_method.3
index 0595e32610237..2ca01cd4459c1 100644
--- a/secure/lib/libcrypto/man/man3/UI_create_method.3
+++ b/secure/lib/libcrypto/man/man3/UI_create_method.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "UI_CREATE_METHOD 3"
-.TH UI_CREATE_METHOD 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH UI_CREATE_METHOD 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/UI_new.3 b/secure/lib/libcrypto/man/man3/UI_new.3
index 957dc5243ada5..1d8fc1d99e05c 100644
--- a/secure/lib/libcrypto/man/man3/UI_new.3
+++ b/secure/lib/libcrypto/man/man3/UI_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "UI_NEW 3"
-.TH UI_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH UI_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509V3_get_d2i.3 b/secure/lib/libcrypto/man/man3/X509V3_get_d2i.3
index cb8e597984e26..8f615c07892ee 100644
--- a/secure/lib/libcrypto/man/man3/X509V3_get_d2i.3
+++ b/secure/lib/libcrypto/man/man3/X509V3_get_d2i.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509V3_GET_D2I 3"
-.TH X509V3_GET_D2I 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509V3_GET_D2I 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_ALGOR_dup.3 b/secure/lib/libcrypto/man/man3/X509_ALGOR_dup.3
index 3342bff2ea5f7..94b91beacfa8f 100644
--- a/secure/lib/libcrypto/man/man3/X509_ALGOR_dup.3
+++ b/secure/lib/libcrypto/man/man3/X509_ALGOR_dup.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_ALGOR_DUP 3"
-.TH X509_ALGOR_DUP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_ALGOR_DUP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_CRL_get0_by_serial.3 b/secure/lib/libcrypto/man/man3/X509_CRL_get0_by_serial.3
index 511585c2568a9..de9257eb49c14 100644
--- a/secure/lib/libcrypto/man/man3/X509_CRL_get0_by_serial.3
+++ b/secure/lib/libcrypto/man/man3/X509_CRL_get0_by_serial.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_CRL_GET0_BY_SERIAL 3"
-.TH X509_CRL_GET0_BY_SERIAL 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_CRL_GET0_BY_SERIAL 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_EXTENSION_set_object.3 b/secure/lib/libcrypto/man/man3/X509_EXTENSION_set_object.3
index 3c612c6840e78..fe67274d34c3d 100644
--- a/secure/lib/libcrypto/man/man3/X509_EXTENSION_set_object.3
+++ b/secure/lib/libcrypto/man/man3/X509_EXTENSION_set_object.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_EXTENSION_SET_OBJECT 3"
-.TH X509_EXTENSION_SET_OBJECT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_EXTENSION_SET_OBJECT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_LOOKUP.3 b/secure/lib/libcrypto/man/man3/X509_LOOKUP.3
new file mode 100644
index 0000000000000..18fc00247b0b3
--- /dev/null
+++ b/secure/lib/libcrypto/man/man3/X509_LOOKUP.3
@@ -0,0 +1,310 @@
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
+.\"
+.\" Standard preamble:
+.\" ========================================================================
+.de Sp \" Vertical space (when we can't use .PP)
+.if t .sp .5v
+.if n .sp
+..
+.de Vb \" Begin verbatim text
+.ft CW
+.nf
+.ne \\$1
+..
+.de Ve \" End verbatim text
+.ft R
+.fi
+..
+.\" Set up some character translations and predefined strings. \*(-- will
+.\" give an unbreakable dash, \*(PI will give pi, \*(L" will give a left
+.\" double quote, and \*(R" will give a right double quote. \*(C+ will
+.\" give a nicer C++. Capital omega is used to do unbreakable dashes and
+.\" therefore won't be available. \*(C` and \*(C' expand to `' in nroff,
+.\" nothing in troff, for use with C<>.
+.tr \(*W-
+.ds C+ C\v'-.1v'\h'-1p'\s-2+\h'-1p'+\s0\v'.1v'\h'-1p'
+.ie n \{\
+. ds -- \(*W-
+. ds PI pi
+. if (\n(.H=4u)&(1m=24u) .ds -- \(*W\h'-12u'\(*W\h'-12u'-\" diablo 10 pitch
+. if (\n(.H=4u)&(1m=20u) .ds -- \(*W\h'-12u'\(*W\h'-8u'-\" diablo 12 pitch
+. ds L" ""
+. ds R" ""
+. ds C` ""
+. ds C' ""
+'br\}
+.el\{\
+. ds -- \|\(em\|
+. ds PI \(*p
+. ds L" ``
+. ds R" ''
+. ds C`
+. ds C'
+'br\}
+.\"
+.\" Escape single quotes in literal strings from groff's Unicode transform.
+.ie \n(.g .ds Aq \(aq
+.el .ds Aq '
+.\"
+.\" If the F register is >0, we'll generate index entries on stderr for
+.\" titles (.TH), headers (.SH), subsections (.SS), items (.Ip), and index
+.\" entries marked with X<> in POD. Of course, you'll have to process the
+.\" output yourself in some meaningful fashion.
+.\"
+.\" Avoid warning from groff about undefined register 'F'.
+.de IX
+..
+.nr rF 0
+.if \n(.g .if rF .nr rF 1
+.if (\n(rF:(\n(.g==0)) \{\
+. if \nF \{\
+. de IX
+. tm Index:\\$1\t\\n%\t"\\$2"
+..
+. if !\nF==2 \{\
+. nr % 0
+. nr F 2
+. \}
+. \}
+.\}
+.rr rF
+.\"
+.\" Accent mark definitions (@(#)ms.acc 1.5 88/02/08 SMI; from UCB 4.2).
+.\" Fear. Run. Save yourself. No user-serviceable parts.
+. \" fudge factors for nroff and troff
+.if n \{\
+. ds #H 0
+. ds #V .8m
+. ds #F .3m
+. ds #[ \f1
+. ds #] \fP
+.\}
+.if t \{\
+. ds #H ((1u-(\\\\n(.fu%2u))*.13m)
+. ds #V .6m
+. ds #F 0
+. ds #[ \&
+. ds #] \&
+.\}
+. \" simple accents for nroff and troff
+.if n \{\
+. ds ' \&
+. ds ` \&
+. ds ^ \&
+. ds , \&
+. ds ~ ~
+. ds /
+.\}
+.if t \{\
+. ds ' \\k:\h'-(\\n(.wu*8/10-\*(#H)'\'\h"|\\n:u"
+. ds ` \\k:\h'-(\\n(.wu*8/10-\*(#H)'\`\h'|\\n:u'
+. ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'^\h'|\\n:u'
+. ds , \\k:\h'-(\\n(.wu*8/10)',\h'|\\n:u'
+. ds ~ \\k:\h'-(\\n(.wu-\*(#H-.1m)'~\h'|\\n:u'
+. ds / \\k:\h'-(\\n(.wu*8/10-\*(#H)'\z\(sl\h'|\\n:u'
+.\}
+. \" troff and (daisy-wheel) nroff accents
+.ds : \\k:\h'-(\\n(.wu*8/10-\*(#H+.1m+\*(#F)'\v'-\*(#V'\z.\h'.2m+\*(#F'.\h'|\\n:u'\v'\*(#V'
+.ds 8 \h'\*(#H'\(*b\h'-\*(#H'
+.ds o \\k:\h'-(\\n(.wu+\w'\(de'u-\*(#H)/2u'\v'-.3n'\*(#[\z\(de\v'.3n'\h'|\\n:u'\*(#]
+.ds d- \h'\*(#H'\(pd\h'-\w'~'u'\v'-.25m'\f2\(hy\fP\v'.25m'\h'-\*(#H'
+.ds D- D\\k:\h'-\w'D'u'\v'-.11m'\z\(hy\v'.11m'\h'|\\n:u'
+.ds th \*(#[\v'.3m'\s+1I\s-1\v'-.3m'\h'-(\w'I'u*2/3)'\s-1o\s+1\*(#]
+.ds Th \*(#[\s+2I\s-2\h'-\w'I'u*3/5'\v'-.3m'o\v'.3m'\*(#]
+.ds ae a\h'-(\w'a'u*4/10)'e
+.ds Ae A\h'-(\w'A'u*4/10)'E
+. \" corrections for vroff
+.if v .ds ~ \\k:\h'-(\\n(.wu*9/10-\*(#H)'\s-2\u~\d\s+2\h'|\\n:u'
+.if v .ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'\v'-.4m'^\v'.4m'\h'|\\n:u'
+. \" for low resolution devices (crt and lpr)
+.if \n(.H>23 .if \n(.V>19 \
+\{\
+. ds : e
+. ds 8 ss
+. ds o a
+. ds d- d\h'-1'\(ga
+. ds D- D\h'-1'\(hy
+. ds th \o'bp'
+. ds Th \o'LP'
+. ds ae ae
+. ds Ae AE
+.\}
+.rm #[ #] #H #V #F C
+.\" ========================================================================
+.\"
+.IX Title "X509_LOOKUP 3"
+.TH X509_LOOKUP 3 "2020-03-17" "1.1.1e" "OpenSSL"
+.\" For nroff, turn off justification. Always turn off hyphenation; it makes
+.\" way too many mistakes in technical documents.
+.if n .ad l
+.nh
+.SH "NAME"
+X509_LOOKUP, X509_LOOKUP_TYPE, X509_LOOKUP_new, X509_LOOKUP_free, X509_LOOKUP_init, X509_LOOKUP_shutdown, X509_LOOKUP_set_method_data, X509_LOOKUP_get_method_data, X509_LOOKUP_ctrl, X509_LOOKUP_load_file, X509_LOOKUP_add_dir, X509_LOOKUP_get_store, X509_LOOKUP_by_subject, X509_LOOKUP_by_issuer_serial, X509_LOOKUP_by_fingerprint, X509_LOOKUP_by_alias \&\- OpenSSL certificate lookup mechanisms
+.SH "SYNOPSIS"
+.IX Header "SYNOPSIS"
+.Vb 1
+\& #include <openssl/x509_vfy.h>
+\&
+\& typedef x509_lookup_st X509_LOOKUP;
+\&
+\& typedef enum X509_LOOKUP_TYPE;
+\&
+\& X509_LOOKUP *X509_LOOKUP_new(X509_LOOKUP_METHOD *method);
+\& int X509_LOOKUP_init(X509_LOOKUP *ctx);
+\& int X509_LOOKUP_shutdown(X509_LOOKUP *ctx);
+\& void X509_LOOKUP_free(X509_LOOKUP *ctx);
+\&
+\& int X509_LOOKUP_set_method_data(X509_LOOKUP *ctx, void *data);
+\& void *X509_LOOKUP_get_method_data(const X509_LOOKUP *ctx);
+\&
+\& int X509_LOOKUP_ctrl(X509_LOOKUP *ctx, int cmd, const char *argc,
+\& long argl, char **ret);
+\& int X509_LOOKUP_load_file(X509_LOOKUP *ctx, char *name, long type);
+\& int X509_LOOKUP_add_dir(X509_LOOKUP *ctx, char *name, long type);
+\&
+\& X509_STORE *X509_LOOKUP_get_store(const X509_LOOKUP *ctx);
+\&
+\& int X509_LOOKUP_by_subject(X509_LOOKUP *ctx, X509_LOOKUP_TYPE type,
+\& X509_NAME *name, X509_OBJECT *ret);
+\& int X509_LOOKUP_by_issuer_serial(X509_LOOKUP *ctx, X509_LOOKUP_TYPE type,
+\& X509_NAME *name, ASN1_INTEGER *serial,
+\& X509_OBJECT *ret);
+\& int X509_LOOKUP_by_fingerprint(X509_LOOKUP *ctx, X509_LOOKUP_TYPE type,
+\& const unsigned char *bytes, int len,
+\& X509_OBJECT *ret);
+\& int X509_LOOKUP_by_alias(X509_LOOKUP *ctx, X509_LOOKUP_TYPE type,
+\& const char *str, int len, X509_OBJECT *ret);
+.Ve
+.SH "DESCRIPTION"
+.IX Header "DESCRIPTION"
+The \fBX509_LOOKUP\fR structure holds the information needed to look up
+certificates and CRLs according to an associated \fBX509_LOOKUP_METHOD\fR\|(3).
+Multiple \fBX509_LOOKUP\fR instances can be added to an \fBX509_STORE\fR\|(3)
+to enable lookup in that store.
+.PP
+\&\fBX509_LOOKUP_new()\fR creates a new \fBX509_LOOKUP\fR using the given lookup
+\&\fImethod\fR.
+It can also be created by calling \fBX509_STORE_add_lookup\fR\|(3), which
+will associate an \fBX509_STORE\fR with the lookup mechanism.
+.PP
+\&\fBX509_LOOKUP_init()\fR initializes the internal state and resources as
+needed by the given \fBX509_LOOKUP\fR to do its work.
+.PP
+\&\fBX509_LOOKUP_shutdown()\fR tears down the internal state and resources of
+the given \fBX509_LOOKUP\fR.
+.PP
+\&\fBX509_LOOKUP_free()\fR destructs the given \fBX509_LOOKUP\fR.
+.PP
+\&\fBX509_LOOKUP_set_method_data()\fR associates a pointer to application data
+to the given \fBX509_LOOKUP\fR.
+.PP
+\&\fBX509_LOOKUP_get_method_data()\fR retrieves a pointer to application data
+from the given \fBX509_LOOKUP\fR.
+.PP
+\&\fBX509_LOOKUP_ctrl()\fR is used to set or get additional data to or from an
+\&\fBX509_LOOKUP\fR structure or its associated \fBX509_LOOKUP_METHOD\fR\|(3).
+The arguments of the control command are passed via \fIargc\fR and \fIargl\fR,
+its return value via \fI*ret\fR.
+The meaning of the arguments depends on the \fIcmd\fR number of the
+control command. In general, this function is not called directly, but
+wrapped by a macro call, see below.
+The control \fIcmd\fRs known to OpenSSL are discussed in more depth
+in \*(L"Control Commands\*(R".
+.PP
+\&\fBX509_LOOKUP_load_file()\fR passes a filename to be loaded immediately
+into the associated \fBX509_STORE\fR.
+\&\fItype\fR indicates what type of object is expected.
+This can only be used with a lookup using the implementation
+\&\fBX509_LOOKUP_file\fR\|(3).
+.PP
+\&\fBX509_LOOKUP_add_dir()\fR passes a directory specification from which
+certificates and CRLs are loaded on demand into the associated
+\&\fBX509_STORE\fR.
+\&\fItype\fR indicates what type of object is expected.
+This can only be used with a lookup using the implementation
+\&\fBX509_LOOKUP_hash_dir\fR\|(3).
+.PP
+\&\fBX509_LOOKUP_load_file()\fR, \fBX509_LOOKUP_add_dir()\fR,
+\&\fBX509_LOOKUP_add_store()\fR, and \fBX509_LOOKUP_load_store()\fR are implemented
+as macros that use \fBX509_LOOKUP_ctrl()\fR.
+.PP
+\&\fBX509_LOOKUP_by_subject()\fR, \fBX509_LOOKUP_by_issuer_serial()\fR,
+\&\fBX509_LOOKUP_by_fingerprint()\fR, and \fBX509_LOOKUP_by_alias()\fR look up
+certificates and CRLs in the \fBX509_STORE\fR\|(3) associated with the
+\&\fBX509_LOOKUP\fR using different criteria, where the looked up object is
+stored in \fIret\fR.
+Some of the underlying \fBX509_LOOKUP_METHOD\fRs will also cache objects
+matching the criteria in the associated \fBX509_STORE\fR, which makes it
+possible to handle cases where the criteria have more than one hit.
+.SS "File Types"
+.IX Subsection "File Types"
+\&\fBX509_LOOKUP_load_file()\fR and \fBX509_LOOKUP_add_dir()\fR take a \fItype\fR,
+which can be one of the following:
+.IP "\fBX509_FILETYPE_PEM\fR" 4
+.IX Item "X509_FILETYPE_PEM"
+The file or files that are loaded are expected to be in \s-1PEM\s0 format.
+.IP "\fBX509_FILETYPE_ASN1\fR" 4
+.IX Item "X509_FILETYPE_ASN1"
+The file or files that are loaded are expected to be in raw \s-1DER\s0 format.
+.IP "\fBX509_FILETYPE_DEFAULT\fR" 4
+.IX Item "X509_FILETYPE_DEFAULT"
+The default certificate file or directory is used. In this case,
+\&\fIname\fR is ignored.
+.SS "Control Commands"
+.IX Subsection "Control Commands"
+The \fBX509_LOOKUP_METHOD\fRs built into OpenSSL recognise the following
+\&\fBX509_LOOKUP_ctrl()\fR \fIcmd\fRs:
+.IP "\fBX509_L_FILE_LOAD\fR" 4
+.IX Item "X509_L_FILE_LOAD"
+This is the command that \fBX509_LOOKUP_load_file()\fR uses.
+The filename is passed in \fIargc\fR, and the type in \fIargl\fR.
+.IP "\fBX509_L_ADD_DIR\fR" 4
+.IX Item "X509_L_ADD_DIR"
+This is the command that \fBX509_LOOKUP_add_dir()\fR uses.
+The directory specification is passed in \fIargc\fR, and the type in
+\&\fIargl\fR.
+.IP "\fBX509_L_ADD_STORE\fR" 4
+.IX Item "X509_L_ADD_STORE"
+This is the command that \fBX509_LOOKUP_add_store()\fR uses.
+The \s-1URI\s0 is passed in \fIargc\fR.
+.IP "\fBX509_L_LOAD_STORE\fR" 4
+.IX Item "X509_L_LOAD_STORE"
+This is the command that \fBX509_LOOKUP_load_store()\fR uses.
+The \s-1URI\s0 is passed in \fIargc\fR.
+.SH "RETURN VALUES"
+.IX Header "RETURN VALUES"
+\&\fBX509_LOOKUP_new()\fR returns an \fBX509_LOOKUP\fR pointer when successful,
+or \s-1NULL\s0 on error.
+.PP
+\&\fBX509_LOOKUP_init()\fR and \fBX509_LOOKUP_shutdown()\fR return 1 on success, or
+0 on error.
+.PP
+\&\fBX509_LOOKUP_ctrl()\fR returns \-1 if the \fBX509_LOOKUP\fR doesn't have an
+associated \fBX509_LOOKUP_METHOD\fR, or 1 if the \fBX509_LOOKUP_METHOD\fR
+doesn't have a control function.
+Otherwise, it returns what the control function in the
+\&\fBX509_LOOKUP_METHOD\fR returns, which is usually 1 on success and 0 on
+error.
+.IX Xref "X509_LOOKUP_METHOD"
+.PP
+\&\fBX509_LOOKUP_get_store()\fR returns an \fBX509_STORE\fR pointer if there is
+one, otherwise \s-1NULL.\s0
+.PP
+\&\fBX509_LOOKUP_by_subject()\fR, \fBX509_LOOKUP_by_issuer_serial()\fR,
+\&\fBX509_LOOKUP_by_fingerprint()\fR, and \fBX509_LOOKUP_by_alias()\fR all return 0
+if there is no \fBX509_LOOKUP_METHOD\fR or that method doesn't implement
+the corresponding function.
+Otherwise, they return what the corresponding function in the
+\&\fBX509_LOOKUP_METHOD\fR returns, which is usually 1 on success and 0 on
+error.
+.SH "SEE ALSO"
+.IX Header "SEE ALSO"
+\&\fBX509_LOOKUP_METHOD\fR\|(3), \fBX509_STORE\fR\|(3)
+.SH "COPYRIGHT"
+.IX Header "COPYRIGHT"
+Copyright 2020 The OpenSSL Project Authors. All Rights Reserved.
+.PP
+Licensed under the Apache License 2.0 (the \*(L"License\*(R"). You may not use
+this file except in compliance with the License. You can obtain a copy
+in the file \s-1LICENSE\s0 in the source distribution or at
+<https://www.openssl.org/source/license.html>.
diff --git a/secure/lib/libcrypto/man/man3/X509_LOOKUP_hash_dir.3 b/secure/lib/libcrypto/man/man3/X509_LOOKUP_hash_dir.3
index d3f4d8ed599b8..3708df2fd81f0 100644
--- a/secure/lib/libcrypto/man/man3/X509_LOOKUP_hash_dir.3
+++ b/secure/lib/libcrypto/man/man3/X509_LOOKUP_hash_dir.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_LOOKUP_HASH_DIR 3"
-.TH X509_LOOKUP_HASH_DIR 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_LOOKUP_HASH_DIR 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_LOOKUP_meth_new.3 b/secure/lib/libcrypto/man/man3/X509_LOOKUP_meth_new.3
index 98105df74981c..d1e59f9ebaf25 100644
--- a/secure/lib/libcrypto/man/man3/X509_LOOKUP_meth_new.3
+++ b/secure/lib/libcrypto/man/man3/X509_LOOKUP_meth_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,18 +133,20 @@
.\" ========================================================================
.\"
.IX Title "X509_LOOKUP_METH_NEW 3"
-.TH X509_LOOKUP_METH_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_LOOKUP_METH_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-X509_LOOKUP_meth_new, X509_LOOKUP_meth_free, X509_LOOKUP_meth_set_new_item, X509_LOOKUP_meth_get_new_item, X509_LOOKUP_meth_set_free, X509_LOOKUP_meth_get_free, X509_LOOKUP_meth_set_init, X509_LOOKUP_meth_get_init, X509_LOOKUP_meth_set_shutdown, X509_LOOKUP_meth_get_shutdown, X509_LOOKUP_ctrl_fn, X509_LOOKUP_meth_set_ctrl, X509_LOOKUP_meth_get_ctrl, X509_LOOKUP_get_by_subject_fn, X509_LOOKUP_meth_set_get_by_subject, X509_LOOKUP_meth_get_get_by_subject, X509_LOOKUP_get_by_issuer_serial_fn, X509_LOOKUP_meth_set_get_by_issuer_serial, X509_LOOKUP_meth_get_get_by_issuer_serial, X509_LOOKUP_get_by_fingerprint_fn, X509_LOOKUP_meth_set_get_by_fingerprint, X509_LOOKUP_meth_get_get_by_fingerprint, X509_LOOKUP_get_by_alias_fn, X509_LOOKUP_meth_set_get_by_alias, X509_LOOKUP_meth_get_get_by_alias, X509_LOOKUP_set_method_data, X509_LOOKUP_get_method_data, X509_LOOKUP_get_store, X509_OBJECT_set1_X509, X509_OBJECT_set1_X509_CRL \&\- Routines to build up X509_LOOKUP methods
+X509_LOOKUP_METHOD, X509_LOOKUP_meth_new, X509_LOOKUP_meth_free, X509_LOOKUP_meth_set_new_item, X509_LOOKUP_meth_get_new_item, X509_LOOKUP_meth_set_free, X509_LOOKUP_meth_get_free, X509_LOOKUP_meth_set_init, X509_LOOKUP_meth_get_init, X509_LOOKUP_meth_set_shutdown, X509_LOOKUP_meth_get_shutdown, X509_LOOKUP_ctrl_fn, X509_LOOKUP_meth_set_ctrl, X509_LOOKUP_meth_get_ctrl, X509_LOOKUP_get_by_subject_fn, X509_LOOKUP_meth_set_get_by_subject, X509_LOOKUP_meth_get_get_by_subject, X509_LOOKUP_get_by_issuer_serial_fn, X509_LOOKUP_meth_set_get_by_issuer_serial, X509_LOOKUP_meth_get_get_by_issuer_serial, X509_LOOKUP_get_by_fingerprint_fn, X509_LOOKUP_meth_set_get_by_fingerprint, X509_LOOKUP_meth_get_get_by_fingerprint, X509_LOOKUP_get_by_alias_fn, X509_LOOKUP_meth_set_get_by_alias, X509_LOOKUP_meth_get_get_by_alias, X509_OBJECT_set1_X509, X509_OBJECT_set1_X509_CRL \&\- Routines to build up X509_LOOKUP methods
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
\& #include <openssl/x509_vfy.h>
\&
+\& typedef x509_lookup_method_st X509_LOOKUP_METHOD;
+\&
\& X509_LOOKUP_METHOD *X509_LOOKUP_meth_new(const char *name);
\& void X509_LOOKUP_meth_free(X509_LOOKUP_METHOD *method);
\&
@@ -213,11 +215,6 @@ X509_LOOKUP_meth_new, X509_LOOKUP_meth_free, X509_LOOKUP_meth_set_new_item, X509
\& X509_LOOKUP_get_by_alias_fn X509_LOOKUP_meth_get_get_by_alias(
\& const X509_LOOKUP_METHOD *method);
\&
-\& int X509_LOOKUP_set_method_data(X509_LOOKUP *ctx, void *data);
-\& void *X509_LOOKUP_get_method_data(const X509_LOOKUP *ctx);
-\&
-\& X509_STORE *X509_LOOKUP_get_store(const X509_LOOKUP *ctx);
-\&
\& int X509_OBJECT_set1_X509(X509_OBJECT *a, X509 *obj);
\& int X509_OBJECT_set1_X509_CRL(X509_OBJECT *a, X509_CRL *obj);
.Ve
@@ -239,7 +236,7 @@ method.
function that is called when an \fBX509_LOOKUP\fR object is created with
\&\fBX509_LOOKUP_new()\fR. If an X509_LOOKUP_METHOD requires any per\-X509_LOOKUP
specific data, the supplied new_item function should allocate this data and
-invoke \fBX509_LOOKUP_set_method_data()\fR.
+invoke \fBX509_LOOKUP_set_method_data\fR\|(3).
.PP
\&\fBX509_LOOKUP_get_free()\fR and \fBX509_LOOKUP_set_free()\fR get and set the function
that is used to free any method data that was allocated and set from within
@@ -247,7 +244,7 @@ new_item function.
.PP
\&\fBX509_LOOKUP_meth_get_init()\fR and \fBX509_LOOKUP_meth_set_init()\fR get and set the
function that is used to initialize the method data that was set with
-\&\fBX509_LOOKUP_set_method_data()\fR as part of the new_item routine.
+\&\fBX509_LOOKUP_set_method_data\fR\|(3) as part of the new_item routine.
.PP
\&\fBX509_LOOKUP_meth_get_shutdown()\fR and \fBX509_LOOKUP_meth_set_shutdown()\fR get and set
the function that is used to shut down the method data whose state was
@@ -284,9 +281,9 @@ increments the result's reference count.
.PP
Any method data that was created as a result of the new_item function
set by \fBX509_LOOKUP_meth_set_new_item()\fR can be accessed with
-\&\fBX509_LOOKUP_get_method_data()\fR. The \fBX509_STORE\fR object that owns the
-X509_LOOKUP may be accessed with \fBX509_LOOKUP_get_store()\fR. Successful lookups
-should return 1, and unsuccessful lookups should return 0.
+\&\fBX509_LOOKUP_get_method_data\fR\|(3). The \fBX509_STORE\fR object that owns the
+X509_LOOKUP may be accessed with \fBX509_LOOKUP_get_store\fR\|(3). Successful
+lookups should return 1, and unsuccessful lookups should return 0.
.PP
\&\fBX509_LOOKUP_get_get_by_subject()\fR, \fBX509_LOOKUP_get_get_by_issuer_serial()\fR,
\&\fBX509_LOOKUP_get_get_by_fingerprint()\fR, \fBX509_LOOKUP_get_get_by_alias()\fR retrieve
@@ -305,7 +302,7 @@ pointers.
The functions described here were added in OpenSSL 1.1.0i.
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2018\-2019 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2018\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/X509_NAME_ENTRY_get_object.3 b/secure/lib/libcrypto/man/man3/X509_NAME_ENTRY_get_object.3
index 9f9720d81da60..9259d8399ba35 100644
--- a/secure/lib/libcrypto/man/man3/X509_NAME_ENTRY_get_object.3
+++ b/secure/lib/libcrypto/man/man3/X509_NAME_ENTRY_get_object.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_NAME_ENTRY_GET_OBJECT 3"
-.TH X509_NAME_ENTRY_GET_OBJECT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_NAME_ENTRY_GET_OBJECT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_NAME_add_entry_by_txt.3 b/secure/lib/libcrypto/man/man3/X509_NAME_add_entry_by_txt.3
index 0e8f8f88ca31a..216f54ff39f51 100644
--- a/secure/lib/libcrypto/man/man3/X509_NAME_add_entry_by_txt.3
+++ b/secure/lib/libcrypto/man/man3/X509_NAME_add_entry_by_txt.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_NAME_ADD_ENTRY_BY_TXT 3"
-.TH X509_NAME_ADD_ENTRY_BY_TXT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_NAME_ADD_ENTRY_BY_TXT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_NAME_get0_der.3 b/secure/lib/libcrypto/man/man3/X509_NAME_get0_der.3
index 4e0f4e1aa8e3c..56b987b7b591d 100644
--- a/secure/lib/libcrypto/man/man3/X509_NAME_get0_der.3
+++ b/secure/lib/libcrypto/man/man3/X509_NAME_get0_der.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_NAME_GET0_DER 3"
-.TH X509_NAME_GET0_DER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_NAME_GET0_DER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_NAME_get_index_by_NID.3 b/secure/lib/libcrypto/man/man3/X509_NAME_get_index_by_NID.3
index 4bdf4f791297b..50934838dea4b 100644
--- a/secure/lib/libcrypto/man/man3/X509_NAME_get_index_by_NID.3
+++ b/secure/lib/libcrypto/man/man3/X509_NAME_get_index_by_NID.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_NAME_GET_INDEX_BY_NID 3"
-.TH X509_NAME_GET_INDEX_BY_NID 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_NAME_GET_INDEX_BY_NID 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_NAME_print_ex.3 b/secure/lib/libcrypto/man/man3/X509_NAME_print_ex.3
index 105de0571f670..4018ba4dacc75 100644
--- a/secure/lib/libcrypto/man/man3/X509_NAME_print_ex.3
+++ b/secure/lib/libcrypto/man/man3/X509_NAME_print_ex.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_NAME_PRINT_EX 3"
-.TH X509_NAME_PRINT_EX 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_NAME_PRINT_EX 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_PUBKEY_new.3 b/secure/lib/libcrypto/man/man3/X509_PUBKEY_new.3
index 0e4ca164ac888..928cb862f6eb6 100644
--- a/secure/lib/libcrypto/man/man3/X509_PUBKEY_new.3
+++ b/secure/lib/libcrypto/man/man3/X509_PUBKEY_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_PUBKEY_NEW 3"
-.TH X509_PUBKEY_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_PUBKEY_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_SIG_get0.3 b/secure/lib/libcrypto/man/man3/X509_SIG_get0.3
index 08e5529eb61dd..40fe9b57455c7 100644
--- a/secure/lib/libcrypto/man/man3/X509_SIG_get0.3
+++ b/secure/lib/libcrypto/man/man3/X509_SIG_get0.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_SIG_GET0 3"
-.TH X509_SIG_GET0 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_SIG_GET0 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_STORE_CTX_get_error.3 b/secure/lib/libcrypto/man/man3/X509_STORE_CTX_get_error.3
index 152e9d9a82946..281f5f2e67212 100644
--- a/secure/lib/libcrypto/man/man3/X509_STORE_CTX_get_error.3
+++ b/secure/lib/libcrypto/man/man3/X509_STORE_CTX_get_error.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_STORE_CTX_GET_ERROR 3"
-.TH X509_STORE_CTX_GET_ERROR 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_STORE_CTX_GET_ERROR 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_STORE_CTX_new.3 b/secure/lib/libcrypto/man/man3/X509_STORE_CTX_new.3
index 836196cc4ff44..b4d80384c7b8c 100644
--- a/secure/lib/libcrypto/man/man3/X509_STORE_CTX_new.3
+++ b/secure/lib/libcrypto/man/man3/X509_STORE_CTX_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_STORE_CTX_NEW 3"
-.TH X509_STORE_CTX_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_STORE_CTX_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -155,7 +155,7 @@ X509_STORE_CTX_new, X509_STORE_CTX_cleanup, X509_STORE_CTX_free, X509_STORE_CTX_
\& void X509_STORE_CTX_set0_trusted_stack(X509_STORE_CTX *ctx, STACK_OF(X509) *sk);
\&
\& void X509_STORE_CTX_set_cert(X509_STORE_CTX *ctx, X509 *x);
-\& STACK_OF(X509) *X509_STORE_CTX_get0_chain(X609_STORE_CTX *ctx);
+\& STACK_OF(X509) *X509_STORE_CTX_get0_chain(X509_STORE_CTX *ctx);
\& void X509_STORE_CTX_set0_verified_chain(X509_STORE_CTX *ctx, STACK_OF(X509) *chain);
\& void X509_STORE_CTX_set0_crls(X509_STORE_CTX *ctx, STACK_OF(X509_CRL) *sk);
\&
diff --git a/secure/lib/libcrypto/man/man3/X509_STORE_CTX_set_verify_cb.3 b/secure/lib/libcrypto/man/man3/X509_STORE_CTX_set_verify_cb.3
index df7e6a6e19420..441b5e815779e 100644
--- a/secure/lib/libcrypto/man/man3/X509_STORE_CTX_set_verify_cb.3
+++ b/secure/lib/libcrypto/man/man3/X509_STORE_CTX_set_verify_cb.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_STORE_CTX_SET_VERIFY_CB 3"
-.TH X509_STORE_CTX_SET_VERIFY_CB 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_STORE_CTX_SET_VERIFY_CB 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_STORE_add_cert.3 b/secure/lib/libcrypto/man/man3/X509_STORE_add_cert.3
index 5ec34234c3294..f6748cd350b4d 100644
--- a/secure/lib/libcrypto/man/man3/X509_STORE_add_cert.3
+++ b/secure/lib/libcrypto/man/man3/X509_STORE_add_cert.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,18 +133,20 @@
.\" ========================================================================
.\"
.IX Title "X509_STORE_ADD_CERT 3"
-.TH X509_STORE_ADD_CERT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_STORE_ADD_CERT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-X509_STORE_add_cert, X509_STORE_add_crl, X509_STORE_set_depth, X509_STORE_set_flags, X509_STORE_set_purpose, X509_STORE_set_trust, X509_STORE_load_locations, X509_STORE_set_default_paths \&\- X509_STORE manipulation
+X509_STORE, X509_STORE_add_cert, X509_STORE_add_crl, X509_STORE_set_depth, X509_STORE_set_flags, X509_STORE_set_purpose, X509_STORE_set_trust, X509_STORE_add_lookup, X509_STORE_load_locations, X509_STORE_set_default_paths \&\- X509_STORE manipulation
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 1
\& #include <openssl/x509_vfy.h>
\&
+\& typedef x509_store_st X509_STORE;
+\&
\& int X509_STORE_add_cert(X509_STORE *ctx, X509 *x);
\& int X509_STORE_add_crl(X509_STORE *ctx, X509_CRL *x);
\& int X509_STORE_set_depth(X509_STORE *store, int depth);
@@ -152,6 +154,9 @@ X509_STORE_add_cert, X509_STORE_add_crl, X509_STORE_set_depth, X509_STORE_set_fl
\& int X509_STORE_set_purpose(X509_STORE *ctx, int purpose);
\& int X509_STORE_set_trust(X509_STORE *ctx, int trust);
\&
+\& X509_LOOKUP *X509_STORE_add_lookup(X509_STORE *store,
+\& X509_LOOKUP_METHOD *meth);
+\&
\& int X509_STORE_load_locations(X509_STORE *ctx,
\& const char *file, const char *dir);
\& int X509_STORE_set_default_paths(X509_STORE *ctx);
@@ -198,6 +203,11 @@ for the corresponding values used in certificate chain validation. Their
behavior is documented in the corresponding \fBX509_VERIFY_PARAM\fR manual
pages, e.g., \fBX509_VERIFY_PARAM_set_depth\fR\|(3).
.PP
+\&\fBX509_STORE_add_lookup()\fR finds or creates a \fBX509_LOOKUP\fR\|(3) with the
+\&\fBX509_LOOKUP_METHOD\fR\|(3) \fImeth\fR and adds it to the \fBX509_STORE\fR
+\&\fIstore\fR. This also associates the \fBX509_STORE\fR with the lookup, so
+\&\fBX509_LOOKUP\fR functions can look up objects in that store.
+.PP
\&\fBX509_STORE_load_locations()\fR loads trusted certificate(s) into an
\&\fBX509_STORE\fR from a given file and/or directory path. It is permitted
to specify just a file, just a directory, or both paths. The certificates
@@ -214,6 +224,9 @@ paths.
\&\fBX509_STORE_set_flags()\fR, \fBX509_STORE_set_purpose()\fR,
\&\fBX509_STORE_set_trust()\fR, \fBX509_STORE_load_locations()\fR, and
\&\fBX509_STORE_set_default_paths()\fR return 1 on success or 0 on failure.
+.PP
+\&\fBX509_STORE_add_lookup()\fR returns the found or created
+\&\fBX509_LOOKUP\fR\|(3), or \s-1NULL\s0 on error.
.SH "SEE ALSO"
.IX Header "SEE ALSO"
\&\fBX509_LOOKUP_hash_dir\fR\|(3).
@@ -222,7 +235,7 @@ paths.
\&\fBX509_STORE_get0_param\fR\|(3)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2017\-2019 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2017\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/X509_STORE_get0_param.3 b/secure/lib/libcrypto/man/man3/X509_STORE_get0_param.3
index 8196f17935a6d..4af6bb7cfd530 100644
--- a/secure/lib/libcrypto/man/man3/X509_STORE_get0_param.3
+++ b/secure/lib/libcrypto/man/man3/X509_STORE_get0_param.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_STORE_GET0_PARAM 3"
-.TH X509_STORE_GET0_PARAM 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_STORE_GET0_PARAM 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_STORE_new.3 b/secure/lib/libcrypto/man/man3/X509_STORE_new.3
index 291660b75abe7..d4efa2495345a 100644
--- a/secure/lib/libcrypto/man/man3/X509_STORE_new.3
+++ b/secure/lib/libcrypto/man/man3/X509_STORE_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_STORE_NEW 3"
-.TH X509_STORE_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_STORE_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_STORE_set_verify_cb_func.3 b/secure/lib/libcrypto/man/man3/X509_STORE_set_verify_cb_func.3
index afdc359fdb8d6..e7577db355a2a 100644
--- a/secure/lib/libcrypto/man/man3/X509_STORE_set_verify_cb_func.3
+++ b/secure/lib/libcrypto/man/man3/X509_STORE_set_verify_cb_func.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_STORE_SET_VERIFY_CB_FUNC 3"
-.TH X509_STORE_SET_VERIFY_CB_FUNC 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_STORE_SET_VERIFY_CB_FUNC 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_VERIFY_PARAM_set_flags.3 b/secure/lib/libcrypto/man/man3/X509_VERIFY_PARAM_set_flags.3
index 09d0211d2d366..be9aa6f823369 100644
--- a/secure/lib/libcrypto/man/man3/X509_VERIFY_PARAM_set_flags.3
+++ b/secure/lib/libcrypto/man/man3/X509_VERIFY_PARAM_set_flags.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_VERIFY_PARAM_SET_FLAGS 3"
-.TH X509_VERIFY_PARAM_SET_FLAGS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_VERIFY_PARAM_SET_FLAGS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_check_ca.3 b/secure/lib/libcrypto/man/man3/X509_check_ca.3
index 289b9369d0686..db856ab4981fd 100644
--- a/secure/lib/libcrypto/man/man3/X509_check_ca.3
+++ b/secure/lib/libcrypto/man/man3/X509_check_ca.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_CHECK_CA 3"
-.TH X509_CHECK_CA 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_CHECK_CA 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_check_host.3 b/secure/lib/libcrypto/man/man3/X509_check_host.3
index 9f6b088d42944..410c8fa5ca58b 100644
--- a/secure/lib/libcrypto/man/man3/X509_check_host.3
+++ b/secure/lib/libcrypto/man/man3/X509_check_host.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_CHECK_HOST 3"
-.TH X509_CHECK_HOST 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_CHECK_HOST 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_check_issued.3 b/secure/lib/libcrypto/man/man3/X509_check_issued.3
index e109f1dfcb40e..fee954a521ea1 100644
--- a/secure/lib/libcrypto/man/man3/X509_check_issued.3
+++ b/secure/lib/libcrypto/man/man3/X509_check_issued.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_CHECK_ISSUED 3"
-.TH X509_CHECK_ISSUED 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_CHECK_ISSUED 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_check_private_key.3 b/secure/lib/libcrypto/man/man3/X509_check_private_key.3
index 7f00cf473eb4b..6429a938bfa00 100644
--- a/secure/lib/libcrypto/man/man3/X509_check_private_key.3
+++ b/secure/lib/libcrypto/man/man3/X509_check_private_key.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_CHECK_PRIVATE_KEY 3"
-.TH X509_CHECK_PRIVATE_KEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_CHECK_PRIVATE_KEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_cmp.3 b/secure/lib/libcrypto/man/man3/X509_cmp.3
index 74c3882ea3221..d9239c63c3d3f 100644
--- a/secure/lib/libcrypto/man/man3/X509_cmp.3
+++ b/secure/lib/libcrypto/man/man3/X509_cmp.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_CMP 3"
-.TH X509_CMP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_CMP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_cmp_time.3 b/secure/lib/libcrypto/man/man3/X509_cmp_time.3
index aa0e6c9e32e80..d7fa43cdde136 100644
--- a/secure/lib/libcrypto/man/man3/X509_cmp_time.3
+++ b/secure/lib/libcrypto/man/man3/X509_cmp_time.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_CMP_TIME 3"
-.TH X509_CMP_TIME 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_CMP_TIME 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_digest.3 b/secure/lib/libcrypto/man/man3/X509_digest.3
index a976fca38ae7f..0fc59b23772c9 100644
--- a/secure/lib/libcrypto/man/man3/X509_digest.3
+++ b/secure/lib/libcrypto/man/man3/X509_digest.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_DIGEST 3"
-.TH X509_DIGEST 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_DIGEST 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_dup.3 b/secure/lib/libcrypto/man/man3/X509_dup.3
index 291044d591cac..79845b4e7e30d 100644
--- a/secure/lib/libcrypto/man/man3/X509_dup.3
+++ b/secure/lib/libcrypto/man/man3/X509_dup.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_DUP 3"
-.TH X509_DUP 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_DUP 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_get0_notBefore.3 b/secure/lib/libcrypto/man/man3/X509_get0_notBefore.3
index e9f67eb3cd84a..0910c4fcf731e 100644
--- a/secure/lib/libcrypto/man/man3/X509_get0_notBefore.3
+++ b/secure/lib/libcrypto/man/man3/X509_get0_notBefore.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_GET0_NOTBEFORE 3"
-.TH X509_GET0_NOTBEFORE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_GET0_NOTBEFORE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_get0_signature.3 b/secure/lib/libcrypto/man/man3/X509_get0_signature.3
index da6a641fa2fdf..bb46ce116a980 100644
--- a/secure/lib/libcrypto/man/man3/X509_get0_signature.3
+++ b/secure/lib/libcrypto/man/man3/X509_get0_signature.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_GET0_SIGNATURE 3"
-.TH X509_GET0_SIGNATURE 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_GET0_SIGNATURE 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_get0_uids.3 b/secure/lib/libcrypto/man/man3/X509_get0_uids.3
index cf86bfafea674..226c3a188165f 100644
--- a/secure/lib/libcrypto/man/man3/X509_get0_uids.3
+++ b/secure/lib/libcrypto/man/man3/X509_get0_uids.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_GET0_UIDS 3"
-.TH X509_GET0_UIDS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_GET0_UIDS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_get_extension_flags.3 b/secure/lib/libcrypto/man/man3/X509_get_extension_flags.3
index 7d132afcb6f46..f8d3a62d7b724 100644
--- a/secure/lib/libcrypto/man/man3/X509_get_extension_flags.3
+++ b/secure/lib/libcrypto/man/man3/X509_get_extension_flags.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_GET_EXTENSION_FLAGS 3"
-.TH X509_GET_EXTENSION_FLAGS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_GET_EXTENSION_FLAGS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_get_pubkey.3 b/secure/lib/libcrypto/man/man3/X509_get_pubkey.3
index 201d862f60729..82bccfdd603ea 100644
--- a/secure/lib/libcrypto/man/man3/X509_get_pubkey.3
+++ b/secure/lib/libcrypto/man/man3/X509_get_pubkey.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_GET_PUBKEY 3"
-.TH X509_GET_PUBKEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_GET_PUBKEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_get_serialNumber.3 b/secure/lib/libcrypto/man/man3/X509_get_serialNumber.3
index 9f7d1700c5eb6..e0e9be3fadd43 100644
--- a/secure/lib/libcrypto/man/man3/X509_get_serialNumber.3
+++ b/secure/lib/libcrypto/man/man3/X509_get_serialNumber.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_GET_SERIALNUMBER 3"
-.TH X509_GET_SERIALNUMBER 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_GET_SERIALNUMBER 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_get_subject_name.3 b/secure/lib/libcrypto/man/man3/X509_get_subject_name.3
index 778f302bf29ad..44c83eac4efcc 100644
--- a/secure/lib/libcrypto/man/man3/X509_get_subject_name.3
+++ b/secure/lib/libcrypto/man/man3/X509_get_subject_name.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_GET_SUBJECT_NAME 3"
-.TH X509_GET_SUBJECT_NAME 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_GET_SUBJECT_NAME 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_get_version.3 b/secure/lib/libcrypto/man/man3/X509_get_version.3
index 2c20c6df2d5c1..7cc29a7977f97 100644
--- a/secure/lib/libcrypto/man/man3/X509_get_version.3
+++ b/secure/lib/libcrypto/man/man3/X509_get_version.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_GET_VERSION 3"
-.TH X509_GET_VERSION 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_GET_VERSION 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_new.3 b/secure/lib/libcrypto/man/man3/X509_new.3
index ad0412dbd854b..1337cd2f65ab3 100644
--- a/secure/lib/libcrypto/man/man3/X509_new.3
+++ b/secure/lib/libcrypto/man/man3/X509_new.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_NEW 3"
-.TH X509_NEW 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_NEW 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_sign.3 b/secure/lib/libcrypto/man/man3/X509_sign.3
index 81bfc191e03f9..b1d9add9b917e 100644
--- a/secure/lib/libcrypto/man/man3/X509_sign.3
+++ b/secure/lib/libcrypto/man/man3/X509_sign.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_SIGN 3"
-.TH X509_SIGN 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_SIGN 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509_verify_cert.3 b/secure/lib/libcrypto/man/man3/X509_verify_cert.3
index d745211a3bf23..2f1657ed976f0 100644
--- a/secure/lib/libcrypto/man/man3/X509_verify_cert.3
+++ b/secure/lib/libcrypto/man/man3/X509_verify_cert.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509_VERIFY_CERT 3"
-.TH X509_VERIFY_CERT 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509_VERIFY_CERT 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/X509v3_get_ext_by_NID.3 b/secure/lib/libcrypto/man/man3/X509v3_get_ext_by_NID.3
index 9de93b5ce8b27..952ae1f1c604d 100644
--- a/secure/lib/libcrypto/man/man3/X509v3_get_ext_by_NID.3
+++ b/secure/lib/libcrypto/man/man3/X509v3_get_ext_by_NID.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509V3_GET_EXT_BY_NID 3"
-.TH X509V3_GET_EXT_BY_NID 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509V3_GET_EXT_BY_NID 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/d2i_DHparams.3 b/secure/lib/libcrypto/man/man3/d2i_DHparams.3
index d4ba9be40c31a..21991fee54526 100644
--- a/secure/lib/libcrypto/man/man3/d2i_DHparams.3
+++ b/secure/lib/libcrypto/man/man3/d2i_DHparams.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "D2I_DHPARAMS 3"
-.TH D2I_DHPARAMS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH D2I_DHPARAMS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/d2i_PKCS8PrivateKey_bio.3 b/secure/lib/libcrypto/man/man3/d2i_PKCS8PrivateKey_bio.3
index aabcf3a27a674..db227925f2238 100644
--- a/secure/lib/libcrypto/man/man3/d2i_PKCS8PrivateKey_bio.3
+++ b/secure/lib/libcrypto/man/man3/d2i_PKCS8PrivateKey_bio.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "D2I_PKCS8PRIVATEKEY_BIO 3"
-.TH D2I_PKCS8PRIVATEKEY_BIO 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH D2I_PKCS8PRIVATEKEY_BIO 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/d2i_PrivateKey.3 b/secure/lib/libcrypto/man/man3/d2i_PrivateKey.3
index fe1f6493b36d9..b6af7afe63edf 100644
--- a/secure/lib/libcrypto/man/man3/d2i_PrivateKey.3
+++ b/secure/lib/libcrypto/man/man3/d2i_PrivateKey.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "D2I_PRIVATEKEY 3"
-.TH D2I_PRIVATEKEY 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH D2I_PRIVATEKEY 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/d2i_SSL_SESSION.3 b/secure/lib/libcrypto/man/man3/d2i_SSL_SESSION.3
index 1c12812fe9734..7f2fc20d4d3f6 100644
--- a/secure/lib/libcrypto/man/man3/d2i_SSL_SESSION.3
+++ b/secure/lib/libcrypto/man/man3/d2i_SSL_SESSION.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "D2I_SSL_SESSION 3"
-.TH D2I_SSL_SESSION 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH D2I_SSL_SESSION 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/d2i_X509.3 b/secure/lib/libcrypto/man/man3/d2i_X509.3
index 024c8dcefd636..13fbf650767a1 100644
--- a/secure/lib/libcrypto/man/man3/d2i_X509.3
+++ b/secure/lib/libcrypto/man/man3/d2i_X509.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,13 +133,13 @@
.\" ========================================================================
.\"
.IX Title "D2I_X509 3"
-.TH D2I_X509 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH D2I_X509 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
.nh
.SH "NAME"
-d2i_ACCESS_DESCRIPTION, d2i_ADMISSIONS, d2i_ADMISSION_SYNTAX, d2i_ASIdOrRange, d2i_ASIdentifierChoice, d2i_ASIdentifiers, d2i_ASN1_BIT_STRING, d2i_ASN1_BMPSTRING, d2i_ASN1_ENUMERATED, d2i_ASN1_GENERALIZEDTIME, d2i_ASN1_GENERALSTRING, d2i_ASN1_IA5STRING, d2i_ASN1_INTEGER, d2i_ASN1_NULL, d2i_ASN1_OBJECT, d2i_ASN1_OCTET_STRING, d2i_ASN1_PRINTABLE, d2i_ASN1_PRINTABLESTRING, d2i_ASN1_SEQUENCE_ANY, d2i_ASN1_SET_ANY, d2i_ASN1_T61STRING, d2i_ASN1_TIME, d2i_ASN1_TYPE, d2i_ASN1_UINTEGER, d2i_ASN1_UNIVERSALSTRING, d2i_ASN1_UTCTIME, d2i_ASN1_UTF8STRING, d2i_ASN1_VISIBLESTRING, d2i_ASRange, d2i_AUTHORITY_INFO_ACCESS, d2i_AUTHORITY_KEYID, d2i_BASIC_CONSTRAINTS, d2i_CERTIFICATEPOLICIES, d2i_CMS_ContentInfo, d2i_CMS_ReceiptRequest, d2i_CMS_bio, d2i_CRL_DIST_POINTS, d2i_DHxparams, d2i_DIRECTORYSTRING, d2i_DISPLAYTEXT, d2i_DIST_POINT, d2i_DIST_POINT_NAME, d2i_DSAPrivateKey, d2i_DSAPrivateKey_bio, d2i_DSAPrivateKey_fp, d2i_DSAPublicKey, d2i_DSA_PUBKEY, d2i_DSA_PUBKEY_bio, d2i_DSA_PUBKEY_fp, d2i_DSA_SIG, d2i_DSAparams, d2i_ECDSA_SIG, d2i_ECPKParameters, d2i_ECParameters, d2i_ECPrivateKey, d2i_ECPrivateKey_bio, d2i_ECPrivateKey_fp, d2i_EC_PUBKEY, d2i_EC_PUBKEY_bio, d2i_EC_PUBKEY_fp, d2i_EDIPARTYNAME, d2i_ESS_CERT_ID, d2i_ESS_ISSUER_SERIAL, d2i_ESS_SIGNING_CERT, d2i_EXTENDED_KEY_USAGE, d2i_GENERAL_NAME, d2i_GENERAL_NAMES, d2i_IPAddressChoice, d2i_IPAddressFamily, d2i_IPAddressOrRange, d2i_IPAddressRange, d2i_ISSUING_DIST_POINT, d2i_NAMING_AUTHORITY, d2i_NETSCAPE_CERT_SEQUENCE, d2i_NETSCAPE_SPKAC, d2i_NETSCAPE_SPKI, d2i_NOTICEREF, d2i_OCSP_BASICRESP, d2i_OCSP_CERTID, d2i_OCSP_CERTSTATUS, d2i_OCSP_CRLID, d2i_OCSP_ONEREQ, d2i_OCSP_REQINFO, d2i_OCSP_REQUEST, d2i_OCSP_RESPBYTES, d2i_OCSP_RESPDATA, d2i_OCSP_RESPID, d2i_OCSP_RESPONSE, d2i_OCSP_REVOKEDINFO, d2i_OCSP_SERVICELOC, d2i_OCSP_SIGNATURE, d2i_OCSP_SINGLERESP, d2i_OTHERNAME, d2i_PBE2PARAM, d2i_PBEPARAM, d2i_PBKDF2PARAM, d2i_PKCS12, d2i_PKCS12_BAGS, d2i_PKCS12_MAC_DATA, d2i_PKCS12_SAFEBAG, d2i_PKCS12_bio, d2i_PKCS12_fp, d2i_PKCS7, 
d2i_PKCS7_DIGEST, d2i_PKCS7_ENCRYPT, d2i_PKCS7_ENC_CONTENT, d2i_PKCS7_ENVELOPE, d2i_PKCS7_ISSUER_AND_SERIAL, d2i_PKCS7_RECIP_INFO, d2i_PKCS7_SIGNED, d2i_PKCS7_SIGNER_INFO, d2i_PKCS7_SIGN_ENVELOPE, d2i_PKCS7_bio, d2i_PKCS7_fp, d2i_PKCS8_PRIV_KEY_INFO, d2i_PKCS8_PRIV_KEY_INFO_bio, d2i_PKCS8_PRIV_KEY_INFO_fp, d2i_PKCS8_bio, d2i_PKCS8_fp, d2i_PKEY_USAGE_PERIOD, d2i_POLICYINFO, d2i_POLICYQUALINFO, d2i_PROFESSION_INFO, d2i_PROXY_CERT_INFO_EXTENSION, d2i_PROXY_POLICY, d2i_RSAPrivateKey, d2i_RSAPrivateKey_bio, d2i_RSAPrivateKey_fp, d2i_RSAPublicKey, d2i_RSAPublicKey_bio, d2i_RSAPublicKey_fp, d2i_RSA_OAEP_PARAMS, d2i_RSA_PSS_PARAMS, d2i_RSA_PUBKEY, d2i_RSA_PUBKEY_bio, d2i_RSA_PUBKEY_fp, d2i_SCRYPT_PARAMS, d2i_SCT_LIST, d2i_SXNET, d2i_SXNETID, d2i_TS_ACCURACY, d2i_TS_MSG_IMPRINT, d2i_TS_MSG_IMPRINT_bio, d2i_TS_MSG_IMPRINT_fp, d2i_TS_REQ, d2i_TS_REQ_bio, d2i_TS_REQ_fp, d2i_TS_RESP, d2i_TS_RESP_bio, d2i_TS_RESP_fp, d2i_TS_STATUS_INFO, d2i_TS_TST_INFO, d2i_TS_TST_INFO_bio, d2i_TS_TST_INFO_fp, d2i_USERNOTICE, d2i_X509, d2i_X509_ALGOR, d2i_X509_ALGORS, d2i_X509_ATTRIBUTE, d2i_X509_CERT_AUX, d2i_X509_CINF, d2i_X509_CRL, d2i_X509_CRL_INFO, d2i_X509_CRL_bio, d2i_X509_CRL_fp, d2i_X509_EXTENSION, d2i_X509_EXTENSIONS, d2i_X509_NAME, d2i_X509_NAME_ENTRY, d2i_X509_PUBKEY, d2i_X509_REQ, d2i_X509_REQ_INFO, d2i_X509_REQ_bio, d2i_X509_REQ_fp, d2i_X509_REVOKED, d2i_X509_SIG, d2i_X509_VAL, i2d_ACCESS_DESCRIPTION, i2d_ADMISSIONS, i2d_ADMISSION_SYNTAX, i2d_ASIdOrRange, i2d_ASIdentifierChoice, i2d_ASIdentifiers, i2d_ASN1_BIT_STRING, i2d_ASN1_BMPSTRING, i2d_ASN1_ENUMERATED, i2d_ASN1_GENERALIZEDTIME, i2d_ASN1_GENERALSTRING, i2d_ASN1_IA5STRING, i2d_ASN1_INTEGER, i2d_ASN1_NULL, i2d_ASN1_OBJECT, i2d_ASN1_OCTET_STRING, i2d_ASN1_PRINTABLE, i2d_ASN1_PRINTABLESTRING, i2d_ASN1_SEQUENCE_ANY, i2d_ASN1_SET_ANY, i2d_ASN1_T61STRING, i2d_ASN1_TIME, i2d_ASN1_TYPE, i2d_ASN1_UNIVERSALSTRING, i2d_ASN1_UTCTIME, i2d_ASN1_UTF8STRING, i2d_ASN1_VISIBLESTRING, i2d_ASN1_bio_stream, i2d_ASRange, i2d_AUTHORITY_INFO_ACCESS, 
i2d_AUTHORITY_KEYID, i2d_BASIC_CONSTRAINTS, i2d_CERTIFICATEPOLICIES, i2d_CMS_ContentInfo, i2d_CMS_ReceiptRequest, i2d_CMS_bio, i2d_CRL_DIST_POINTS, i2d_DHxparams, i2d_DIRECTORYSTRING, i2d_DISPLAYTEXT, i2d_DIST_POINT, i2d_DIST_POINT_NAME, i2d_DSAPrivateKey, i2d_DSAPrivateKey_bio, i2d_DSAPrivateKey_fp, i2d_DSAPublicKey, i2d_DSA_PUBKEY, i2d_DSA_PUBKEY_bio, i2d_DSA_PUBKEY_fp, i2d_DSA_SIG, i2d_DSAparams, i2d_ECDSA_SIG, i2d_ECPKParameters, i2d_ECParameters, i2d_ECPrivateKey, i2d_ECPrivateKey_bio, i2d_ECPrivateKey_fp, i2d_EC_PUBKEY, i2d_EC_PUBKEY_bio, i2d_EC_PUBKEY_fp, i2d_EDIPARTYNAME, i2d_ESS_CERT_ID, i2d_ESS_ISSUER_SERIAL, i2d_ESS_SIGNING_CERT, i2d_EXTENDED_KEY_USAGE, i2d_GENERAL_NAME, i2d_GENERAL_NAMES, i2d_IPAddressChoice, i2d_IPAddressFamily, i2d_IPAddressOrRange, i2d_IPAddressRange, i2d_ISSUING_DIST_POINT, i2d_NAMING_AUTHORITY, i2d_NETSCAPE_CERT_SEQUENCE, i2d_NETSCAPE_SPKAC, i2d_NETSCAPE_SPKI, i2d_NOTICEREF, i2d_OCSP_BASICRESP, i2d_OCSP_CERTID, i2d_OCSP_CERTSTATUS, i2d_OCSP_CRLID, i2d_OCSP_ONEREQ, i2d_OCSP_REQINFO, i2d_OCSP_REQUEST, i2d_OCSP_RESPBYTES, i2d_OCSP_RESPDATA, i2d_OCSP_RESPID, i2d_OCSP_RESPONSE, i2d_OCSP_REVOKEDINFO, i2d_OCSP_SERVICELOC, i2d_OCSP_SIGNATURE, i2d_OCSP_SINGLERESP, i2d_OTHERNAME, i2d_PBE2PARAM, i2d_PBEPARAM, i2d_PBKDF2PARAM, i2d_PKCS12, i2d_PKCS12_BAGS, i2d_PKCS12_MAC_DATA, i2d_PKCS12_SAFEBAG, i2d_PKCS12_bio, i2d_PKCS12_fp, i2d_PKCS7, i2d_PKCS7_DIGEST, i2d_PKCS7_ENCRYPT, i2d_PKCS7_ENC_CONTENT, i2d_PKCS7_ENVELOPE, i2d_PKCS7_ISSUER_AND_SERIAL, i2d_PKCS7_NDEF, i2d_PKCS7_RECIP_INFO, i2d_PKCS7_SIGNED, i2d_PKCS7_SIGNER_INFO, i2d_PKCS7_SIGN_ENVELOPE, i2d_PKCS7_bio, i2d_PKCS7_fp, i2d_PKCS8PrivateKeyInfo_bio, i2d_PKCS8PrivateKeyInfo_fp, i2d_PKCS8_PRIV_KEY_INFO, i2d_PKCS8_PRIV_KEY_INFO_bio, i2d_PKCS8_PRIV_KEY_INFO_fp, i2d_PKCS8_bio, i2d_PKCS8_fp, i2d_PKEY_USAGE_PERIOD, i2d_POLICYINFO, i2d_POLICYQUALINFO, i2d_PROFESSION_INFO, i2d_PROXY_CERT_INFO_EXTENSION, i2d_PROXY_POLICY, i2d_PublicKey, i2d_RSAPrivateKey, i2d_RSAPrivateKey_bio, i2d_RSAPrivateKey_fp, 
i2d_RSAPublicKey, i2d_RSAPublicKey_bio, i2d_RSAPublicKey_fp, i2d_RSA_OAEP_PARAMS, i2d_RSA_PSS_PARAMS, i2d_RSA_PUBKEY, i2d_RSA_PUBKEY_bio, i2d_RSA_PUBKEY_fp, i2d_SCRYPT_PARAMS, i2d_SCT_LIST, i2d_SXNET, i2d_SXNETID, i2d_TS_ACCURACY, i2d_TS_MSG_IMPRINT, i2d_TS_MSG_IMPRINT_bio, i2d_TS_MSG_IMPRINT_fp, i2d_TS_REQ, i2d_TS_REQ_bio, i2d_TS_REQ_fp, i2d_TS_RESP, i2d_TS_RESP_bio, i2d_TS_RESP_fp, i2d_TS_STATUS_INFO, i2d_TS_TST_INFO, i2d_TS_TST_INFO_bio, i2d_TS_TST_INFO_fp, i2d_USERNOTICE, i2d_X509, i2d_X509_ALGOR, i2d_X509_ALGORS, i2d_X509_ATTRIBUTE, i2d_X509_CERT_AUX, i2d_X509_CINF, i2d_X509_CRL, i2d_X509_CRL_INFO, i2d_X509_CRL_bio, i2d_X509_CRL_fp, i2d_X509_EXTENSION, i2d_X509_EXTENSIONS, i2d_X509_NAME, i2d_X509_NAME_ENTRY, i2d_X509_PUBKEY, i2d_X509_REQ, i2d_X509_REQ_INFO, i2d_X509_REQ_bio, i2d_X509_REQ_fp, i2d_X509_REVOKED, i2d_X509_SIG, i2d_X509_VAL, \&\- convert objects from/to ASN.1/DER representation
+d2i_ACCESS_DESCRIPTION, d2i_ADMISSIONS, d2i_ADMISSION_SYNTAX, d2i_ASIdOrRange, d2i_ASIdentifierChoice, d2i_ASIdentifiers, d2i_ASN1_BIT_STRING, d2i_ASN1_BMPSTRING, d2i_ASN1_ENUMERATED, d2i_ASN1_GENERALIZEDTIME, d2i_ASN1_GENERALSTRING, d2i_ASN1_IA5STRING, d2i_ASN1_INTEGER, d2i_ASN1_NULL, d2i_ASN1_OBJECT, d2i_ASN1_OCTET_STRING, d2i_ASN1_PRINTABLE, d2i_ASN1_PRINTABLESTRING, d2i_ASN1_SEQUENCE_ANY, d2i_ASN1_SET_ANY, d2i_ASN1_T61STRING, d2i_ASN1_TIME, d2i_ASN1_TYPE, d2i_ASN1_UINTEGER, d2i_ASN1_UNIVERSALSTRING, d2i_ASN1_UTCTIME, d2i_ASN1_UTF8STRING, d2i_ASN1_VISIBLESTRING, d2i_ASRange, d2i_AUTHORITY_INFO_ACCESS, d2i_AUTHORITY_KEYID, d2i_BASIC_CONSTRAINTS, d2i_CERTIFICATEPOLICIES, d2i_CMS_ContentInfo, d2i_CMS_ReceiptRequest, d2i_CMS_bio, d2i_CRL_DIST_POINTS, d2i_DHxparams, d2i_DIRECTORYSTRING, d2i_DISPLAYTEXT, d2i_DIST_POINT, d2i_DIST_POINT_NAME, d2i_DSAPrivateKey, d2i_DSAPrivateKey_bio, d2i_DSAPrivateKey_fp, d2i_DSAPublicKey, d2i_DSA_PUBKEY, d2i_DSA_PUBKEY_bio, d2i_DSA_PUBKEY_fp, d2i_DSA_SIG, d2i_DSAparams, d2i_ECDSA_SIG, d2i_ECPKParameters, d2i_ECParameters, d2i_ECPrivateKey, d2i_ECPrivateKey_bio, d2i_ECPrivateKey_fp, d2i_EC_PUBKEY, d2i_EC_PUBKEY_bio, d2i_EC_PUBKEY_fp, d2i_EDIPARTYNAME, d2i_ESS_CERT_ID, d2i_ESS_ISSUER_SERIAL, d2i_ESS_SIGNING_CERT, d2i_EXTENDED_KEY_USAGE, d2i_GENERAL_NAME, d2i_GENERAL_NAMES, d2i_IPAddressChoice, d2i_IPAddressFamily, d2i_IPAddressOrRange, d2i_IPAddressRange, d2i_ISSUING_DIST_POINT, d2i_NAMING_AUTHORITY, d2i_NETSCAPE_CERT_SEQUENCE, d2i_NETSCAPE_SPKAC, d2i_NETSCAPE_SPKI, d2i_NOTICEREF, d2i_OCSP_BASICRESP, d2i_OCSP_CERTID, d2i_OCSP_CERTSTATUS, d2i_OCSP_CRLID, d2i_OCSP_ONEREQ, d2i_OCSP_REQINFO, d2i_OCSP_REQUEST, d2i_OCSP_RESPBYTES, d2i_OCSP_RESPDATA, d2i_OCSP_RESPID, d2i_OCSP_RESPONSE, d2i_OCSP_REVOKEDINFO, d2i_OCSP_SERVICELOC, d2i_OCSP_SIGNATURE, d2i_OCSP_SINGLERESP, d2i_OTHERNAME, d2i_PBE2PARAM, d2i_PBEPARAM, d2i_PBKDF2PARAM, d2i_PKCS12, d2i_PKCS12_BAGS, d2i_PKCS12_MAC_DATA, d2i_PKCS12_SAFEBAG, d2i_PKCS12_bio, d2i_PKCS12_fp, d2i_PKCS7, 
d2i_PKCS7_DIGEST, d2i_PKCS7_ENCRYPT, d2i_PKCS7_ENC_CONTENT, d2i_PKCS7_ENVELOPE, d2i_PKCS7_ISSUER_AND_SERIAL, d2i_PKCS7_RECIP_INFO, d2i_PKCS7_SIGNED, d2i_PKCS7_SIGNER_INFO, d2i_PKCS7_SIGN_ENVELOPE, d2i_PKCS7_bio, d2i_PKCS7_fp, d2i_PKCS8_PRIV_KEY_INFO, d2i_PKCS8_PRIV_KEY_INFO_bio, d2i_PKCS8_PRIV_KEY_INFO_fp, d2i_PKCS8_bio, d2i_PKCS8_fp, d2i_PKEY_USAGE_PERIOD, d2i_POLICYINFO, d2i_POLICYQUALINFO, d2i_PROFESSION_INFO, d2i_PROXY_CERT_INFO_EXTENSION, d2i_PROXY_POLICY, d2i_RSAPrivateKey, d2i_RSAPrivateKey_bio, d2i_RSAPrivateKey_fp, d2i_RSAPublicKey, d2i_RSAPublicKey_bio, d2i_RSAPublicKey_fp, d2i_RSA_OAEP_PARAMS, d2i_RSA_PSS_PARAMS, d2i_RSA_PUBKEY, d2i_RSA_PUBKEY_bio, d2i_RSA_PUBKEY_fp, d2i_SCRYPT_PARAMS, d2i_SCT_LIST, d2i_SXNET, d2i_SXNETID, d2i_TS_ACCURACY, d2i_TS_MSG_IMPRINT, d2i_TS_MSG_IMPRINT_bio, d2i_TS_MSG_IMPRINT_fp, d2i_TS_REQ, d2i_TS_REQ_bio, d2i_TS_REQ_fp, d2i_TS_RESP, d2i_TS_RESP_bio, d2i_TS_RESP_fp, d2i_TS_STATUS_INFO, d2i_TS_TST_INFO, d2i_TS_TST_INFO_bio, d2i_TS_TST_INFO_fp, d2i_USERNOTICE, d2i_X509, d2i_X509_ALGOR, d2i_X509_ALGORS, d2i_X509_ATTRIBUTE, d2i_X509_CERT_AUX, d2i_X509_CINF, d2i_X509_CRL, d2i_X509_CRL_INFO, d2i_X509_CRL_bio, d2i_X509_CRL_fp, d2i_X509_EXTENSION, d2i_X509_EXTENSIONS, d2i_X509_NAME, d2i_X509_NAME_ENTRY, d2i_X509_PUBKEY, d2i_X509_REQ, d2i_X509_REQ_INFO, d2i_X509_REQ_bio, d2i_X509_REQ_fp, d2i_X509_REVOKED, d2i_X509_SIG, d2i_X509_VAL, i2d_ACCESS_DESCRIPTION, i2d_ADMISSIONS, i2d_ADMISSION_SYNTAX, i2d_ASIdOrRange, i2d_ASIdentifierChoice, i2d_ASIdentifiers, i2d_ASN1_BIT_STRING, i2d_ASN1_BMPSTRING, i2d_ASN1_ENUMERATED, i2d_ASN1_GENERALIZEDTIME, i2d_ASN1_GENERALSTRING, i2d_ASN1_IA5STRING, i2d_ASN1_INTEGER, i2d_ASN1_NULL, i2d_ASN1_OBJECT, i2d_ASN1_OCTET_STRING, i2d_ASN1_PRINTABLE, i2d_ASN1_PRINTABLESTRING, i2d_ASN1_SEQUENCE_ANY, i2d_ASN1_SET_ANY, i2d_ASN1_T61STRING, i2d_ASN1_TIME, i2d_ASN1_TYPE, i2d_ASN1_UNIVERSALSTRING, i2d_ASN1_UTCTIME, i2d_ASN1_UTF8STRING, i2d_ASN1_VISIBLESTRING, i2d_ASN1_bio_stream, i2d_ASRange, i2d_AUTHORITY_INFO_ACCESS, 
i2d_AUTHORITY_KEYID, i2d_BASIC_CONSTRAINTS, i2d_CERTIFICATEPOLICIES, i2d_CMS_ContentInfo, i2d_CMS_ReceiptRequest, i2d_CMS_bio, i2d_CRL_DIST_POINTS, i2d_DHxparams, i2d_DIRECTORYSTRING, i2d_DISPLAYTEXT, i2d_DIST_POINT, i2d_DIST_POINT_NAME, i2d_DSAPrivateKey, i2d_DSAPrivateKey_bio, i2d_DSAPrivateKey_fp, i2d_DSAPublicKey, i2d_DSA_PUBKEY, i2d_DSA_PUBKEY_bio, i2d_DSA_PUBKEY_fp, i2d_DSA_SIG, i2d_DSAparams, i2d_ECDSA_SIG, i2d_ECPKParameters, i2d_ECParameters, i2d_ECPrivateKey, i2d_ECPrivateKey_bio, i2d_ECPrivateKey_fp, i2d_EC_PUBKEY, i2d_EC_PUBKEY_bio, i2d_EC_PUBKEY_fp, i2d_EDIPARTYNAME, i2d_ESS_CERT_ID, i2d_ESS_ISSUER_SERIAL, i2d_ESS_SIGNING_CERT, i2d_EXTENDED_KEY_USAGE, i2d_GENERAL_NAME, i2d_GENERAL_NAMES, i2d_IPAddressChoice, i2d_IPAddressFamily, i2d_IPAddressOrRange, i2d_IPAddressRange, i2d_ISSUING_DIST_POINT, i2d_NAMING_AUTHORITY, i2d_NETSCAPE_CERT_SEQUENCE, i2d_NETSCAPE_SPKAC, i2d_NETSCAPE_SPKI, i2d_NOTICEREF, i2d_OCSP_BASICRESP, i2d_OCSP_CERTID, i2d_OCSP_CERTSTATUS, i2d_OCSP_CRLID, i2d_OCSP_ONEREQ, i2d_OCSP_REQINFO, i2d_OCSP_REQUEST, i2d_OCSP_RESPBYTES, i2d_OCSP_RESPDATA, i2d_OCSP_RESPID, i2d_OCSP_RESPONSE, i2d_OCSP_REVOKEDINFO, i2d_OCSP_SERVICELOC, i2d_OCSP_SIGNATURE, i2d_OCSP_SINGLERESP, i2d_OTHERNAME, i2d_PBE2PARAM, i2d_PBEPARAM, i2d_PBKDF2PARAM, i2d_PKCS12, i2d_PKCS12_BAGS, i2d_PKCS12_MAC_DATA, i2d_PKCS12_SAFEBAG, i2d_PKCS12_bio, i2d_PKCS12_fp, i2d_PKCS7, i2d_PKCS7_DIGEST, i2d_PKCS7_ENCRYPT, i2d_PKCS7_ENC_CONTENT, i2d_PKCS7_ENVELOPE, i2d_PKCS7_ISSUER_AND_SERIAL, i2d_PKCS7_NDEF, i2d_PKCS7_RECIP_INFO, i2d_PKCS7_SIGNED, i2d_PKCS7_SIGNER_INFO, i2d_PKCS7_SIGN_ENVELOPE, i2d_PKCS7_bio, i2d_PKCS7_fp, i2d_PKCS8PrivateKeyInfo_bio, i2d_PKCS8PrivateKeyInfo_fp, i2d_PKCS8_PRIV_KEY_INFO, i2d_PKCS8_PRIV_KEY_INFO_bio, i2d_PKCS8_PRIV_KEY_INFO_fp, i2d_PKCS8_bio, i2d_PKCS8_fp, i2d_PKEY_USAGE_PERIOD, i2d_POLICYINFO, i2d_POLICYQUALINFO, i2d_PROFESSION_INFO, i2d_PROXY_CERT_INFO_EXTENSION, i2d_PROXY_POLICY, i2d_RSAPrivateKey, i2d_RSAPrivateKey_bio, i2d_RSAPrivateKey_fp, 
i2d_RSAPublicKey, i2d_RSAPublicKey_bio, i2d_RSAPublicKey_fp, i2d_RSA_OAEP_PARAMS, i2d_RSA_PSS_PARAMS, i2d_RSA_PUBKEY, i2d_RSA_PUBKEY_bio, i2d_RSA_PUBKEY_fp, i2d_SCRYPT_PARAMS, i2d_SCT_LIST, i2d_SXNET, i2d_SXNETID, i2d_TS_ACCURACY, i2d_TS_MSG_IMPRINT, i2d_TS_MSG_IMPRINT_bio, i2d_TS_MSG_IMPRINT_fp, i2d_TS_REQ, i2d_TS_REQ_bio, i2d_TS_REQ_fp, i2d_TS_RESP, i2d_TS_RESP_bio, i2d_TS_RESP_fp, i2d_TS_STATUS_INFO, i2d_TS_TST_INFO, i2d_TS_TST_INFO_bio, i2d_TS_TST_INFO_fp, i2d_USERNOTICE, i2d_X509, i2d_X509_ALGOR, i2d_X509_ALGORS, i2d_X509_ATTRIBUTE, i2d_X509_CERT_AUX, i2d_X509_CINF, i2d_X509_CRL, i2d_X509_CRL_INFO, i2d_X509_CRL_bio, i2d_X509_CRL_fp, i2d_X509_EXTENSION, i2d_X509_EXTENSIONS, i2d_X509_NAME, i2d_X509_NAME_ENTRY, i2d_X509_PUBKEY, i2d_X509_REQ, i2d_X509_REQ_INFO, i2d_X509_REQ_bio, i2d_X509_REQ_fp, i2d_X509_REVOKED, i2d_X509_SIG, i2d_X509_VAL, \&\- convert objects from/to ASN.1/DER representation
.SH "SYNOPSIS"
.IX Header "SYNOPSIS"
.Vb 3
@@ -232,8 +232,8 @@ Represents an \s-1ASN1 OBJECT IDENTIFIER.\s0
.IP "\fBDHparams\fR" 4
.IX Item "DHparams"
Represents a PKCS#3 \s-1DH\s0 parameters structure.
-.IP "\fBDHparamx\fR" 4
-.IX Item "DHparamx"
+.IP "\fBDHxparams\fR" 4
+.IX Item "DHxparams"
Represents an \s-1ANSI X9.42 DH\s0 parameters structure.
.IP "\fB\s-1DSA_PUBKEY\s0\fR" 4
.IX Item "DSA_PUBKEY"
@@ -377,7 +377,7 @@ serialization. This is because some objects cache the encoding for
efficiency reasons.
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 1998\-2019 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 1998\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man3/i2d_CMS_bio_stream.3 b/secure/lib/libcrypto/man/man3/i2d_CMS_bio_stream.3
index 36903b121e1a7..7358aa7d32587 100644
--- a/secure/lib/libcrypto/man/man3/i2d_CMS_bio_stream.3
+++ b/secure/lib/libcrypto/man/man3/i2d_CMS_bio_stream.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "I2D_CMS_BIO_STREAM 3"
-.TH I2D_CMS_BIO_STREAM 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH I2D_CMS_BIO_STREAM 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/i2d_PKCS7_bio_stream.3 b/secure/lib/libcrypto/man/man3/i2d_PKCS7_bio_stream.3
index 96e62e7fb4c9f..29fbf7df29789 100644
--- a/secure/lib/libcrypto/man/man3/i2d_PKCS7_bio_stream.3
+++ b/secure/lib/libcrypto/man/man3/i2d_PKCS7_bio_stream.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "I2D_PKCS7_BIO_STREAM 3"
-.TH I2D_PKCS7_BIO_STREAM 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH I2D_PKCS7_BIO_STREAM 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/i2d_re_X509_tbs.3 b/secure/lib/libcrypto/man/man3/i2d_re_X509_tbs.3
index 9fcbc89df3b54..ab29fc6859ea2 100644
--- a/secure/lib/libcrypto/man/man3/i2d_re_X509_tbs.3
+++ b/secure/lib/libcrypto/man/man3/i2d_re_X509_tbs.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "I2D_RE_X509_TBS 3"
-.TH I2D_RE_X509_TBS 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH I2D_RE_X509_TBS 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man3/o2i_SCT_LIST.3 b/secure/lib/libcrypto/man/man3/o2i_SCT_LIST.3
index aefa717ba74a4..3e9d1a59df4d4 100644
--- a/secure/lib/libcrypto/man/man3/o2i_SCT_LIST.3
+++ b/secure/lib/libcrypto/man/man3/o2i_SCT_LIST.3
@@ -1,4 +1,4 @@
-.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.39)
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
.\"
.\" Standard preamble:
.\" ========================================================================
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "O2I_SCT_LIST 3"
-.TH O2I_SCT_LIST 3 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH O2I_SCT_LIST 3 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man5/x509v3_config.5 b/secure/lib/libcrypto/man/man5/x509v3_config.5
index c0fda6e92019f..310eeadead33f 100644
--- a/secure/lib/libcrypto/man/man5/x509v3_config.5
+++ b/secure/lib/libcrypto/man/man5/x509v3_config.5
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509V3_CONFIG 5"
-.TH X509V3_CONFIG 5 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509V3_CONFIG 5 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/Ed25519.7 b/secure/lib/libcrypto/man/man7/Ed25519.7
index 20fe73d686ec4..d6599c45c1b12 100644
--- a/secure/lib/libcrypto/man/man7/Ed25519.7
+++ b/secure/lib/libcrypto/man/man7/Ed25519.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "ED25519 7"
-.TH ED25519 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH ED25519 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -145,7 +145,7 @@ Ed25519, Ed448 \&\- EVP_PKEY Ed25519 and Ed448 support
The \fBEd25519\fR and \fBEd448\fR \s-1EVP_PKEY\s0 implementation supports key generation,
one-shot digest sign and digest verify using PureEdDSA and \fBEd25519\fR or \fBEd448\fR
(see \s-1RFC8032\s0). It has associated private and public key formats compatible with
-draft\-ietf\-curdle\-pkix\-04.
+\&\s-1RFC 8410.\s0
.PP
No additional parameters can be set during key generation, one-shot signing or
verification. In particular, because PureEdDSA is used, a digest must \fB\s-1NOT\s0\fR be
@@ -213,7 +213,7 @@ output in \s-1PEM\s0 format:
\&\fBEVP_DigestVerifyInit\fR\|(3),
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2017\-2019 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2017\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man7/Makefile b/secure/lib/libcrypto/man/man7/Makefile
index 34c0ff786be68..a9de694537fb7 100644
--- a/secure/lib/libcrypto/man/man7/Makefile
+++ b/secure/lib/libcrypto/man/man7/Makefile
@@ -13,6 +13,7 @@ MAN+= evp.7
MAN+= ossl_store-file.7
MAN+= ossl_store.7
MAN+= passphrase-encoding.7
+MAN+= proxy-certificates.7
MAN+= scrypt.7
MAN+= ssl.7
MAN+= x509.7
diff --git a/secure/lib/libcrypto/man/man7/RAND.7 b/secure/lib/libcrypto/man/man7/RAND.7
index 9352e577af896..08e278914950d 100644
--- a/secure/lib/libcrypto/man/man7/RAND.7
+++ b/secure/lib/libcrypto/man/man7/RAND.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND 7"
-.TH RAND 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/RAND_DRBG.7 b/secure/lib/libcrypto/man/man7/RAND_DRBG.7
index aab4f1dbf8568..af6d6f32f8609 100644
--- a/secure/lib/libcrypto/man/man7/RAND_DRBG.7
+++ b/secure/lib/libcrypto/man/man7/RAND_DRBG.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RAND_DRBG 7"
-.TH RAND_DRBG 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RAND_DRBG 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/RSA-PSS.7 b/secure/lib/libcrypto/man/man7/RSA-PSS.7
index 6be71bad5ba0e..c298e21625a54 100644
--- a/secure/lib/libcrypto/man/man7/RSA-PSS.7
+++ b/secure/lib/libcrypto/man/man7/RSA-PSS.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "RSA-PSS 7"
-.TH RSA-PSS 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH RSA-PSS 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/SM2.7 b/secure/lib/libcrypto/man/man7/SM2.7
index 228278eb46f08..a96f85d76a312 100644
--- a/secure/lib/libcrypto/man/man7/SM2.7
+++ b/secure/lib/libcrypto/man/man7/SM2.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SM2 7"
-.TH SM2 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SM2 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/X25519.7 b/secure/lib/libcrypto/man/man7/X25519.7
index 4fcedf08dddde..0356a8755976e 100644
--- a/secure/lib/libcrypto/man/man7/X25519.7
+++ b/secure/lib/libcrypto/man/man7/X25519.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X25519 7"
-.TH X25519 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X25519 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -144,7 +144,7 @@ X25519, X448 \&\- EVP_PKEY X25519 and X448 support
.IX Header "DESCRIPTION"
The \fBX25519\fR and \fBX448\fR \s-1EVP_PKEY\s0 implementation supports key generation and
key derivation using \fBX25519\fR and \fBX448\fR. It has associated private and public
-key formats compatible with draft\-ietf\-curdle\-pkix\-03.
+key formats compatible with \s-1RFC 8410.\s0
.PP
No additional parameters can be set during key generation.
.PP
@@ -200,7 +200,7 @@ The key derivation example in \fBEVP_PKEY_derive\fR\|(3) can be used with
\&\fBEVP_PKEY_derive_set_peer\fR\|(3)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2017\-2019 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2017\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man7/bio.7 b/secure/lib/libcrypto/man/man7/bio.7
index 2d16323f72a20..3917a5ae98666 100644
--- a/secure/lib/libcrypto/man/man7/bio.7
+++ b/secure/lib/libcrypto/man/man7/bio.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "BIO 7"
-.TH BIO 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH BIO 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/ct.7 b/secure/lib/libcrypto/man/man7/ct.7
index f8005393a6079..08bd44e7d99ae 100644
--- a/secure/lib/libcrypto/man/man7/ct.7
+++ b/secure/lib/libcrypto/man/man7/ct.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "CT 7"
-.TH CT 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH CT 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/des_modes.7 b/secure/lib/libcrypto/man/man7/des_modes.7
index da9a50aefbb38..6d4a39f7a28ff 100644
--- a/secure/lib/libcrypto/man/man7/des_modes.7
+++ b/secure/lib/libcrypto/man/man7/des_modes.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "DES_MODES 7"
-.TH DES_MODES 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH DES_MODES 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/evp.7 b/secure/lib/libcrypto/man/man7/evp.7
index 165b60f35b201..dd07e03d6803f 100644
--- a/secure/lib/libcrypto/man/man7/evp.7
+++ b/secure/lib/libcrypto/man/man7/evp.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "EVP 7"
-.TH EVP 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH EVP 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/ossl_store-file.7 b/secure/lib/libcrypto/man/man7/ossl_store-file.7
index 3185ac380bbf5..07b50c85b9242 100644
--- a/secure/lib/libcrypto/man/man7/ossl_store-file.7
+++ b/secure/lib/libcrypto/man/man7/ossl_store-file.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OSSL_STORE-FILE 7"
-.TH OSSL_STORE-FILE 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OSSL_STORE-FILE 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/ossl_store.7 b/secure/lib/libcrypto/man/man7/ossl_store.7
index c8ab5a70b3df2..805046c4df97a 100644
--- a/secure/lib/libcrypto/man/man7/ossl_store.7
+++ b/secure/lib/libcrypto/man/man7/ossl_store.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "OSSL_STORE 7"
-.TH OSSL_STORE 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH OSSL_STORE 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/passphrase-encoding.7 b/secure/lib/libcrypto/man/man7/passphrase-encoding.7
index a9975f240b42a..f664ee7f9c4b6 100644
--- a/secure/lib/libcrypto/man/man7/passphrase-encoding.7
+++ b/secure/lib/libcrypto/man/man7/passphrase-encoding.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "PASSPHRASE-ENCODING 7"
-.TH PASSPHRASE-ENCODING 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH PASSPHRASE-ENCODING 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
@@ -188,7 +188,7 @@ A pass phrase encoded in \s-1ISO\-8859\-2\s0 could very well have a sequence suc
0xC3 0xAF (which is the two characters \*(L"\s-1LATIN CAPITAL LETTER A WITH BREVE\*(R"\s0
and \*(L"\s-1LATIN CAPITAL LETTER Z WITH DOT ABOVE\*(R"\s0 in \s-1ISO\-8859\-2\s0 encoding), but would
be misinterpreted as the perfectly valid \s-1UTF\-8\s0 encoded code point U+00EF (\s-1LATIN
-SMALL LETTER I WITH DIARESIS\s0) \fIif the pass phrase doesn't contain anything that
+SMALL LETTER I WITH DIAERESIS\s0) \fIif the pass phrase doesn't contain anything that
would be invalid \s-1UTF\-8\s0\fR.
A pass phrase that contains this kind of byte sequence will give a different
outcome in OpenSSL 1.1.0 and newer than in OpenSSL older than 1.1.0.
@@ -279,7 +279,7 @@ erroneous/non\-compliant encoding used by OpenSSL older than 1.1.0)
\&\fBd2i_PKCS8PrivateKey_bio\fR\|(3)
.SH "COPYRIGHT"
.IX Header "COPYRIGHT"
-Copyright 2018 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2018\-2020 The OpenSSL Project Authors. All Rights Reserved.
.PP
Licensed under the OpenSSL license (the \*(L"License\*(R"). You may not use
this file except in compliance with the License. You can obtain a copy
diff --git a/secure/lib/libcrypto/man/man7/proxy-certificates.7 b/secure/lib/libcrypto/man/man7/proxy-certificates.7
new file mode 100644
index 0000000000000..41ed591876931
--- /dev/null
+++ b/secure/lib/libcrypto/man/man7/proxy-certificates.7
@@ -0,0 +1,478 @@
+.\" Automatically generated by Pod::Man 4.11 (Pod::Simple 3.40)
+.\"
+.\" Standard preamble:
+.\" ========================================================================
+.de Sp \" Vertical space (when we can't use .PP)
+.if t .sp .5v
+.if n .sp
+..
+.de Vb \" Begin verbatim text
+.ft CW
+.nf
+.ne \\$1
+..
+.de Ve \" End verbatim text
+.ft R
+.fi
+..
+.\" Set up some character translations and predefined strings. \*(-- will
+.\" give an unbreakable dash, \*(PI will give pi, \*(L" will give a left
+.\" double quote, and \*(R" will give a right double quote. \*(C+ will
+.\" give a nicer C++. Capital omega is used to do unbreakable dashes and
+.\" therefore won't be available. \*(C` and \*(C' expand to `' in nroff,
+.\" nothing in troff, for use with C<>.
+.tr \(*W-
+.ds C+ C\v'-.1v'\h'-1p'\s-2+\h'-1p'+\s0\v'.1v'\h'-1p'
+.ie n \{\
+. ds -- \(*W-
+. ds PI pi
+. if (\n(.H=4u)&(1m=24u) .ds -- \(*W\h'-12u'\(*W\h'-12u'-\" diablo 10 pitch
+. if (\n(.H=4u)&(1m=20u) .ds -- \(*W\h'-12u'\(*W\h'-8u'-\" diablo 12 pitch
+. ds L" ""
+. ds R" ""
+. ds C` ""
+. ds C' ""
+'br\}
+.el\{\
+. ds -- \|\(em\|
+. ds PI \(*p
+. ds L" ``
+. ds R" ''
+. ds C`
+. ds C'
+'br\}
+.\"
+.\" Escape single quotes in literal strings from groff's Unicode transform.
+.ie \n(.g .ds Aq \(aq
+.el .ds Aq '
+.\"
+.\" If the F register is >0, we'll generate index entries on stderr for
+.\" titles (.TH), headers (.SH), subsections (.SS), items (.Ip), and index
+.\" entries marked with X<> in POD. Of course, you'll have to process the
+.\" output yourself in some meaningful fashion.
+.\"
+.\" Avoid warning from groff about undefined register 'F'.
+.de IX
+..
+.nr rF 0
+.if \n(.g .if rF .nr rF 1
+.if (\n(rF:(\n(.g==0)) \{\
+. if \nF \{\
+. de IX
+. tm Index:\\$1\t\\n%\t"\\$2"
+..
+. if !\nF==2 \{\
+. nr % 0
+. nr F 2
+. \}
+. \}
+.\}
+.rr rF
+.\"
+.\" Accent mark definitions (@(#)ms.acc 1.5 88/02/08 SMI; from UCB 4.2).
+.\" Fear. Run. Save yourself. No user-serviceable parts.
+. \" fudge factors for nroff and troff
+.if n \{\
+. ds #H 0
+. ds #V .8m
+. ds #F .3m
+. ds #[ \f1
+. ds #] \fP
+.\}
+.if t \{\
+. ds #H ((1u-(\\\\n(.fu%2u))*.13m)
+. ds #V .6m
+. ds #F 0
+. ds #[ \&
+. ds #] \&
+.\}
+. \" simple accents for nroff and troff
+.if n \{\
+. ds ' \&
+. ds ` \&
+. ds ^ \&
+. ds , \&
+. ds ~ ~
+. ds /
+.\}
+.if t \{\
+. ds ' \\k:\h'-(\\n(.wu*8/10-\*(#H)'\'\h"|\\n:u"
+. ds ` \\k:\h'-(\\n(.wu*8/10-\*(#H)'\`\h'|\\n:u'
+. ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'^\h'|\\n:u'
+. ds , \\k:\h'-(\\n(.wu*8/10)',\h'|\\n:u'
+. ds ~ \\k:\h'-(\\n(.wu-\*(#H-.1m)'~\h'|\\n:u'
+. ds / \\k:\h'-(\\n(.wu*8/10-\*(#H)'\z\(sl\h'|\\n:u'
+.\}
+. \" troff and (daisy-wheel) nroff accents
+.ds : \\k:\h'-(\\n(.wu*8/10-\*(#H+.1m+\*(#F)'\v'-\*(#V'\z.\h'.2m+\*(#F'.\h'|\\n:u'\v'\*(#V'
+.ds 8 \h'\*(#H'\(*b\h'-\*(#H'
+.ds o \\k:\h'-(\\n(.wu+\w'\(de'u-\*(#H)/2u'\v'-.3n'\*(#[\z\(de\v'.3n'\h'|\\n:u'\*(#]
+.ds d- \h'\*(#H'\(pd\h'-\w'~'u'\v'-.25m'\f2\(hy\fP\v'.25m'\h'-\*(#H'
+.ds D- D\\k:\h'-\w'D'u'\v'-.11m'\z\(hy\v'.11m'\h'|\\n:u'
+.ds th \*(#[\v'.3m'\s+1I\s-1\v'-.3m'\h'-(\w'I'u*2/3)'\s-1o\s+1\*(#]
+.ds Th \*(#[\s+2I\s-2\h'-\w'I'u*3/5'\v'-.3m'o\v'.3m'\*(#]
+.ds ae a\h'-(\w'a'u*4/10)'e
+.ds Ae A\h'-(\w'A'u*4/10)'E
+. \" corrections for vroff
+.if v .ds ~ \\k:\h'-(\\n(.wu*9/10-\*(#H)'\s-2\u~\d\s+2\h'|\\n:u'
+.if v .ds ^ \\k:\h'-(\\n(.wu*10/11-\*(#H)'\v'-.4m'^\v'.4m'\h'|\\n:u'
+. \" for low resolution devices (crt and lpr)
+.if \n(.H>23 .if \n(.V>19 \
+\{\
+. ds : e
+. ds 8 ss
+. ds o a
+. ds d- d\h'-1'\(ga
+. ds D- D\h'-1'\(hy
+. ds th \o'bp'
+. ds Th \o'LP'
+. ds ae ae
+. ds Ae AE
+.\}
+.rm #[ #] #H #V #F C
+.\" ========================================================================
+.\"
+.IX Title "PROXY-CERTIFICATES 7"
+.TH PROXY-CERTIFICATES 7 "2020-03-17" "1.1.1e" "OpenSSL"
+.\" For nroff, turn off justification. Always turn off hyphenation; it makes
+.\" way too many mistakes in technical documents.
+.if n .ad l
+.nh
+.SH "NAME"
+proxy\-certificates \- Proxy certificates in OpenSSL
+.SH "DESCRIPTION"
+.IX Header "DESCRIPTION"
+Proxy certificates are defined in \s-1RFC 3820.\s0 They are used to
+extend rights to some other entity (a computer process, typically, or
+sometimes to the user itself). This allows the entity to perform
+operations on behalf of the owner of the \s-1EE\s0 (End Entity) certificate.
+.PP
+The requirements for a valid proxy certificate are:
+.IP "\(bu" 4
+They are issued by an End Entity, either a normal \s-1EE\s0 certificate, or
+another proxy certificate.
+.IP "\(bu" 4
+They must not have the \fBsubjectAltName\fR or \fBissuerAltName\fR
+extensions.
+.IP "\(bu" 4
+They must have the \fBproxyCertInfo\fR extension.
+.IP "\(bu" 4
+They must have the subject of their issuer, with one \fBcommonName\fR
+added.
+.SS "Enabling proxy certificate verification"
+.IX Subsection "Enabling proxy certificate verification"
+OpenSSL expects applications that want to use proxy certificates to be
+specially aware of them, and make that explicit. This is done by
+setting an X509 verification flag:
+.PP
+.Vb 1
+\& X509_STORE_CTX_set_flags(ctx, X509_V_FLAG_ALLOW_PROXY_CERTS);
+.Ve
+.PP
+or
+.PP
+.Vb 1
+\& X509_VERIFY_PARAM_set_flags(param, X509_V_FLAG_ALLOW_PROXY_CERTS);
+.Ve
+.PP
+See \*(L"\s-1NOTES\*(R"\s0 for a discussion on this requirement.
+.SS "Creating proxy certificates"
+.IX Subsection "Creating proxy certificates"
+Creating proxy certificates can be done using the \fBopenssl\-x509\fR\|(1)
+command, with some extra extensions:
+.PP
+.Vb 3
+\& [ v3_proxy ]
+\& # A proxy certificate MUST NEVER be a CA certificate.
+\& basicConstraints=CA:FALSE
+\&
+\& # Usual authority key ID
+\& authorityKeyIdentifier=keyid,issuer:always
+\&
+\& # The extension which marks this certificate as a proxy
+\& proxyCertInfo=critical,language:id\-ppl\-anyLanguage,pathlen:1,policy:text:AB
+.Ve
+.PP
+It's also possible to specify the proxy extension in a separate section:
+.PP
+.Vb 1
+\& proxyCertInfo=critical,@proxy_ext
+\&
+\& [ proxy_ext ]
+\& language=id\-ppl\-anyLanguage
+\& pathlen=0
+\& policy=text:BC
+.Ve
+.PP
+The policy value has a specific syntax, \fIsyntag\fR:\fIstring\fR, where the
+\&\fIsyntag\fR determines what will be done with the string. The following
+\&\fIsyntag\fRs are recognised:
+.IP "\fBtext\fR" 4
+.IX Item "text"
+indicates that the string is a byte sequence, without any encoding:
+.Sp
+.Vb 1
+\& policy=text:ra\*:ksmo\*:rga\*os
+.Ve
+.IP "\fBhex\fR" 4
+.IX Item "hex"
+indicates that the string is hexadecimal\-encoded binary data, with
+colons between each byte (every second hex digit):
+.Sp
+.Vb 1
+\& policy=hex:72:E4:6B:73:6D:F6:72:67:E5:73
+.Ve
+.IP "\fBfile\fR" 4
+.IX Item "file"
+indicates that the text of the policy should be taken from a file.
+The string is then a filename. This is useful for policies that are
+large (more than a few lines, e.g. \s-1XML\s0 documents).
+.PP
+\&\fI\s-1NOTE:\s0 The proxy policy value is what determines the rights granted
+to the process during the lifetime of the proxy certificate. It's up to the
+application to interpret and combine these policies.\fR
+.PP
+With a proxy extension, creating a proxy certificate is a matter of
+two commands:
+.PP
+.Vb 3
+\& openssl req \-new \-config proxy.cnf \e
+\& \-out proxy.req \-keyout proxy.key \e
+\& \-subj "/DC=org/DC=openssl/DC=users/CN=proxy 1"
+\&
+\& openssl x509 \-req \-CAcreateserial \-in proxy.req \-out proxy.crt \e
+\& \-CA user.crt \-CAkey user.key \-days 7 \e
+\&        \-extfile proxy.cnf \-extensions v3_proxy
+.Ve
+.PP
+You can also create a proxy certificate using another proxy
+certificate as issuer (note: using a different configuration
+section for the proxy extensions):
+.PP
+.Vb 3
+\& openssl req \-new \-config proxy.cnf \e
+\& \-out proxy2.req \-keyout proxy2.key \e
+\& \-subj "/DC=org/DC=openssl/DC=users/CN=proxy 1/CN=proxy 2"
+\&
+\& openssl x509 \-req \-CAcreateserial \-in proxy2.req \-out proxy2.crt \e
+\& \-CA proxy.crt \-CAkey proxy.key \-days 7 \e
+\& \-extfile proxy.cnf \-extensions v3_proxy2
+.Ve
+.SS "Using proxy certs in applications"
+.IX Subsection "Using proxy certs in applications"
+To interpret proxy policies, the application would normally start with
+some default rights (perhaps none at all), then compute the resulting
+rights by checking the rights against the chain of proxy certificates,
+user certificate and \s-1CA\s0 certificates.
+.PP
+The complicated part is figuring out how to pass data between your
+application and the certificate validation procedure.
+.PP
+The following ingredients are needed for such processing:
+.IP "\(bu" 4
+a callback function that will be called for every certificate being
+validated. The callback is called several times for each certificate,
+so you must be careful to do the proxy policy interpretation at the
+right time. You also need to fill in the defaults when the \s-1EE\s0
+certificate is checked.
+.IP "\(bu" 4
+a data structure that is shared between your application code and the
+callback.
+.IP "\(bu" 4
+a wrapper function that sets it all up.
+.IP "\(bu" 4
+an ex_data index function that creates an index into the generic
+ex_data store that is attached to an X509 validation context.
+.PP
+The following skeleton code can be used as a starting point:
+.PP
+.Vb 4
+\& #include <string.h>
+\& #include <netdb.h>
+\& #include <openssl/x509.h>
+\& #include <openssl/x509v3.h>
+\&
+\& #define total_rights 25
+\&
+\& /*
+\& * In this example, I will use a view of granted rights as a bit
+\& * array, one bit for each possible right.
+\& */
+\& typedef struct your_rights {
+\& unsigned char rights[(total_rights + 7) / 8];
+\& } YOUR_RIGHTS;
+\&
+\& /*
+\& * The following procedure will create an index for the ex_data
+\& * store in the X509 validation context the first time it\*(Aqs
+\& * called. Subsequent calls will return the same index.
+\& */
+\& static int get_proxy_auth_ex_data_idx(X509_STORE_CTX *ctx)
+\& {
+\& static volatile int idx = \-1;
+\&
+\& if (idx < 0) {
+\& X509_STORE_lock(X509_STORE_CTX_get0_store(ctx));
+\& if (idx < 0) {
+\& idx = X509_STORE_CTX_get_ex_new_index(0,
+\& "for verify callback",
+\& NULL,NULL,NULL);
+\& }
+\& X509_STORE_unlock(X509_STORE_CTX_get0_store(ctx));
+\& }
+\& return idx;
+\& }
+\&
+\& /* Callback to be given to the X509 validation procedure. */
+\& static int verify_callback(int ok, X509_STORE_CTX *ctx)
+\& {
+\& if (ok == 1) {
+\& /*
+\& * It\*(Aqs REALLY important you keep the proxy policy check
+\& * within this section. It\*(Aqs important to know that when
+\& * ok is 1, the certificates are checked from top to
+\& * bottom. You get the CA root first, followed by the
+\& * possible chain of intermediate CAs, followed by the EE
+\& * certificate, followed by the possible proxy
+\& * certificates.
+\& */
+\& X509 *xs = X509_STORE_CTX_get_current_cert(ctx);
+\&
+\& if (X509_get_extension_flags(xs) & EXFLAG_PROXY) {
+\& YOUR_RIGHTS *rights =
+\& (YOUR_RIGHTS *)X509_STORE_CTX_get_ex_data(ctx,
+\& get_proxy_auth_ex_data_idx(ctx));
+\& PROXY_CERT_INFO_EXTENSION *pci =
+\& X509_get_ext_d2i(xs, NID_proxyCertInfo, NULL, NULL);
+\&
+\& switch (OBJ_obj2nid(pci\->proxyPolicy\->policyLanguage)) {
+\& case NID_Independent:
+\& /*
+\& * Do whatever you need to grant explicit rights
+\& * to this particular proxy certificate, usually
+\& * by pulling them from some database. If there
+\& * are none to be found, clear all rights (making
+\& * this and any subsequent proxy certificate void
+\& * of any rights).
+\& */
+\& memset(rights\->rights, 0, sizeof(rights\->rights));
+\& break;
+\& case NID_id_ppl_inheritAll:
+\& /*
+\& * This is basically a NOP, we simply let the
+\& * current rights stand as they are.
+\& */
+\& break;
+\& default:
+\& /*
+\& * This is usually the most complex section of
+\& * code. You really do whatever you want as long
+\& * as you follow RFC 3820. In the example we use
+\& * here, the simplest thing to do is to build
+\& * another, temporary bit array and fill it with
+\& * the rights granted by the current proxy
+\& * certificate, then use it as a mask on the
+\& * accumulated rights bit array, and voila\*`, you
+\& * now have a new accumulated rights bit array.
+\& */
+\& {
+\& int i;
+\& YOUR_RIGHTS tmp_rights;
+\& memset(tmp_rights.rights, 0,
+\& sizeof(tmp_rights.rights));
+\&
+\& /*
+\& * process_rights() is supposed to be a
+\& * procedure that takes a string and its
+\& * length, interprets it and sets the bits
+\& * in the YOUR_RIGHTS pointed at by the
+\& * third argument.
+\& */
+\& process_rights((char *) pci\->proxyPolicy\->policy\->data,
+\& pci\->proxyPolicy\->policy\->length,
+\& &tmp_rights);
+\&
+\& for(i = 0; i < (total_rights + 7) / 8; i++)
+\& rights\->rights[i] &= tmp_rights.rights[i];
+\& }
+\& break;
+\& }
+\& PROXY_CERT_INFO_EXTENSION_free(pci);
+\& } else if (!(X509_get_extension_flags(xs) & EXFLAG_CA)) {
+\& /* We have an EE certificate, let\*(Aqs use it to set default! */
+\& YOUR_RIGHTS *rights =
+\& (YOUR_RIGHTS *)X509_STORE_CTX_get_ex_data(ctx,
+\& get_proxy_auth_ex_data_idx(ctx));
+\&
+\& /*
+\& * The following procedure finds out what rights the
+\& * owner of the current certificate has, and sets them
+\& * in the YOUR_RIGHTS structure pointed at by the
+\& * second argument.
+\& */
+\& set_default_rights(xs, rights);
+\& }
+\& }
+\& return ok;
+\& }
+\&
+\& static int my_X509_verify_cert(X509_STORE_CTX *ctx,
+\& YOUR_RIGHTS *needed_rights)
+\& {
+\& int ok;
+\& int (*save_verify_cb)(int ok,X509_STORE_CTX *ctx) =
+\& X509_STORE_CTX_get_verify_cb(ctx);
+\& YOUR_RIGHTS rights;
+\&
+\& X509_STORE_CTX_set_verify_cb(ctx, verify_callback);
+\& X509_STORE_CTX_set_ex_data(ctx, get_proxy_auth_ex_data_idx(ctx),
+\& &rights);
+\& X509_STORE_CTX_set_flags(ctx, X509_V_FLAG_ALLOW_PROXY_CERTS);
+\& ok = X509_verify_cert(ctx);
+\&
+\& if (ok == 1) {
+\& ok = check_needed_rights(rights, needed_rights);
+\& }
+\&
+\& X509_STORE_CTX_set_verify_cb(ctx, save_verify_cb);
+\&
+\& return ok;
+\& }
+.Ve
+.PP
+If you use \s-1SSL\s0 or \s-1TLS,\s0 you can easily set up a callback to have the
+certificates checked properly, using the code above:
+.PP
+.Vb 2
+\& SSL_CTX_set_cert_verify_callback(s_ctx, my_X509_verify_cert,
+\& &needed_rights);
+.Ve
+.SH "NOTES"
+.IX Header "NOTES"
+To this date, it seems that proxy certificates have only been used in
+environments that are aware of them, and no one seems to have
+investigated how they can be used or misused outside of such an
+environment.
+.PP
+For that reason, OpenSSL requires that applications aware of proxy
+certificates must also make that explicit.
+.PP
+\&\fBsubjectAltName\fR and \fBissuerAltName\fR are forbidden in proxy
+certificates, and this is enforced in OpenSSL. The subject must be
+the same as the issuer, with one commonName added on.
+.SH "SEE ALSO"
+.IX Header "SEE ALSO"
+\&\fBX509_STORE_CTX_set_flags\fR\|(3),
+\&\fBX509_STORE_CTX_set_verify_cb\fR\|(3),
+\&\fBX509_VERIFY_PARAM_set_flags\fR\|(3),
+\&\fBSSL_CTX_set_cert_verify_callback\fR\|(3),
+\&\fBopenssl\-req\fR\|(1), \fBopenssl\-x509\fR\|(1),
+\&\s-1RFC 3820\s0 <https://tools.ietf.org/html/rfc3820>
+.SH "COPYRIGHT"
+.IX Header "COPYRIGHT"
+Copyright 2019 The OpenSSL Project Authors. All Rights Reserved.
+.PP
+Licensed under the Apache License 2.0 (the \*(L"License\*(R"). You may not use
+this file except in compliance with the License. You can obtain a copy
+in the file \s-1LICENSE\s0 in the source distribution or at
+<https://www.openssl.org/source/license.html>.
diff --git a/secure/lib/libcrypto/man/man7/scrypt.7 b/secure/lib/libcrypto/man/man7/scrypt.7
index fd73e3aa5fa40..6eb5c493e99c4 100644
--- a/secure/lib/libcrypto/man/man7/scrypt.7
+++ b/secure/lib/libcrypto/man/man7/scrypt.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SCRYPT 7"
-.TH SCRYPT 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SCRYPT 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/ssl.7 b/secure/lib/libcrypto/man/man7/ssl.7
index 95fcdc10a1165..7f246ee6e190f 100644
--- a/secure/lib/libcrypto/man/man7/ssl.7
+++ b/secure/lib/libcrypto/man/man7/ssl.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "SSL 7"
-.TH SSL 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH SSL 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l
diff --git a/secure/lib/libcrypto/man/man7/x509.7 b/secure/lib/libcrypto/man/man7/x509.7
index 6d493eba9a0c8..c92b476c7e548 100644
--- a/secure/lib/libcrypto/man/man7/x509.7
+++ b/secure/lib/libcrypto/man/man7/x509.7
@@ -133,7 +133,7 @@
.\" ========================================================================
.\"
.IX Title "X509 7"
-.TH X509 7 "2019-09-10" "1.1.1d" "OpenSSL"
+.TH X509 7 "2020-03-17" "1.1.1e" "OpenSSL"
.\" For nroff, turn off justification. Always turn off hyphenation; it makes
.\" way too many mistakes in technical documents.
.if n .ad l